Cloning into '/tmp/jaeger-tests'...
Switched to a new branch 'e2e-tests'
branch 'e2e-tests' set up to track 'origin/e2e-tests'.
Installing kuttl
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64
KUBECONFIG file is: /tmp/kubeconfig-3414875983
for suite in elasticsearch examples generate miscellaneous sidecar streaming ui upgrade; do \
  make run-e2e-tests-$suite ; \
done
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=elasticsearch
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/elasticsearch.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-elasticsearch
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
SKIP_ES_EXTERNAL=true \
KAFKA_VERSION=3.6.0 \
SKIP_KAFKA=false \
./tests/e2e/elasticsearch/render.sh
+++ kubectl get clusterversion
++ output='NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.19.0-0.nightly-2025-03-09-063419   True        False         6m36s   Cluster version is 4.19.0-0.nightly-2025-03-09-063419'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.19.0-0.nightly-2025-03-09-063419   True        False         6m36s   Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/elasticsearch/render.sh
++ export SUITE_DIR=./tests/e2e/elasticsearch
++ SUITE_DIR=./tests/e2e/elasticsearch
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/gomplate https://github.com/hairyhenderson/gomplate/releases/download/v3.10.0/gomplate_linux-amd64-slim
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/yq https://github.com/mikefarah/yq/releases/download/v4.20.2/yq_linux_amd64
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
Try 0... curl -sLo /tmp/kustomize.tar.gz https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv4.5.7/kustomize_v4.5.7_linux_amd64.tar.gz
Try 1... curl -sLo /tmp/kustomize.tar.gz https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv4.5.7/kustomize_v4.5.7_linux_amd64.tar.gz
Try 2... curl -sLo /tmp/kustomize.tar.gz https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv4.5.7/kustomize_v4.5.7_linux_amd64.tar.gz
Try 3... curl -sLo /tmp/kustomize.tar.gz https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv4.5.7/kustomize_v4.5.7_linux_amd64.tar.gz
Try 4... curl -sLo /tmp/kustomize.tar.gz https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv4.5.7/kustomize_v4.5.7_linux_amd64.tar.gz
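The install scripts above retry each download up to five times (Try 0 ... Try 4). The loop itself is not visible in the trace; a plausible reconstruction, with the try count and back-off as assumptions:

# Hypothetical reconstruction of the "Try N..." download loop above;
# the maximum try count and the sleep between tries are assumptions.
retry_download() {
    url=$1
    dest=$2
    for i in 0 1 2 3 4; do
        echo "Try $i... curl -sLo $dest $url"
        curl -sLo "$dest" "$url" && return 0
        sleep 2
    done
    echo "ERR: could not download $url" >&2
    return 1
}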
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
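render.sh gates KAFKA_USE_CUSTOM_PODSET (and, later, the managed-ES tests) on sort -V version comparisons, as the version_le/version_ge traces show. Reconstructed from those traces:

# Reconstructed from the xtrace above: sort -V (or -rV) puts the
# smaller (or larger) version first, so comparing the head of the
# sorted list against $1 answers "$1 <= $2" / "$1 >= $2".
version_le() {
    test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
}
version_ge() {
    test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" == "$1"
}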
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ start_test es-from-aio-to-production
+ '[' 1 -ne 1 ']'
+ test_name=es-from-aio-to-production
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-from-aio-to-production'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m'
Rendering files for test es-from-aio-to-production
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-from-aio-to-production
+ cd es-from-aio-to-production
+ jaeger_name=my-jaeger
+ render_install_jaeger my-jaeger allInOne 00
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=allInOne
+ test_step=00
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test my-jaeger true 01
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 03
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=03
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml
+ [[ true = true ]]
+ [[ true = true ]]
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml
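Every render_* helper follows the same pattern: export a handful of variables, run gomplate over a template from tests/templates/, write a numbered kuttl step file, then unset the variables so they cannot leak into the next test. Schematically (how the templates consume the variables, e.g. through gomplate's env functions, is an assumption not visible in this log):

# The render pattern used throughout this suite: parameterize via
# exported environment variables, then materialize the template into
# a numbered kuttl step file.
export JAEGER_NAME=my-jaeger
/tmp/jaeger-tests/bin/gomplate \
    -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template \
    -o ./00-install.yaml
unset JAEGER_NAME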
+ render_smoke_test my-jaeger true 04
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=04
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test es-increasing-replicas
+ '[' 1 -ne 1 ']'
+ test_name=es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-increasing-replicas'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m'
Rendering files for test es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production
+ '[' es-from-aio-to-production '!=' _build ']'
+ cd ..
+ mkdir -p es-increasing-replicas
+ cd es-increasing-replicas
+ jaeger_name=simple-prod
+ '[' true = true ']'
+ jaeger_deployment_mode=production_autoprovisioned
+ render_install_jaeger simple-prod production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ cp ./01-install.yaml ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml
+ cp ./01-assert.yaml ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml
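The es-increasing-replicas steps are generated by copying step 01 and patching both the install file and its assert file with yq, so the asserted status always mirrors the patched spec:

# Step 02 = step 01 with replicas bumped to 2 (yq v4 syntax, as in
# the trace); the assert file must state the matching readyReplicas,
# otherwise kuttl would pass before the scale-up finished.
cp ./01-install.yaml ./02-install.yaml
yq e -i '.spec.collector.replicas = 2' ./02-install.yaml
yq e -i '.spec.query.replicas = 2' ./02-install.yaml
cp ./01-assert.yaml ./02-assert.yaml
yq e -i '.spec.replicas = 2' ./02-assert.yaml
yq e -i '.status.readyReplicas = 2' ./02-assert.yaml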
+ render_smoke_test simple-prod true 03
+ '[' 3 -ne 3 ']'
+ jaeger=simple-prod
+ is_secured=true
+ test_step=03
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ cp ./02-install.yaml ./04-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml
+ '[' true = true ']'
+ skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas
+ '[' es-increasing-replicas '!=' _build ']'
+ cd ..
+ rm -rf es-index-cleaner-upstream
+ warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m'
WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true
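skip_test only has to undo what start_test did: step out of the per-test directory, delete it so kuttl never sees it, and leave a yellow WAR line in the log. Reconstructed from the trace (the exact implementation is an assumption):

# Plausible reconstruction of skip_test from the xtrace above;
# warning() is the suite's own echo -e helper.
skip_test() {
    [ $# -ne 2 ] && return 1
    test_name=$1
    message=$2
    [ "$(basename "$(pwd)")" != _build ] && cd ..
    rm -rf "$test_name"
    warning "$test_name: $message"
}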
+ '[' true = true ']'
+ es_index_cleaner -autoprov production_autoprovisioned
+ '[' 2 -ne 2 ']'
+ postfix=-autoprov
+ jaeger_deployment_strategy=production_autoprovisioned
+ start_test es-index-cleaner-autoprov
+ '[' 1 -ne 1 ']'
+ test_name=es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-index-cleaner-autoprov'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m'
Rendering files for test es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-index-cleaner-autoprov
+ cd es-index-cleaner-autoprov
+ jaeger_name=test-es-index-cleaner-with-prefix
+ cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner
+ secured_es_connection=false
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_managed_es ']'
+ ELASTICSEARCH_URL=https://elasticsearch
+ secured_es_connection=true
+ cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md .
+ render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=test-es-index-cleaner-with-prefix
+ JAEGER_NAME=test-es-index-cleaner-with-prefix
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml
+ render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02
+ '[' 6 -ne 6 ']'
+ jaeger=test-es-index-cleaner-with-prefix
+ is_secured=true
+ number_of_spans=5
+ job_number=00
+ ensure_reported_spans=true
+ test_step=02
+ export JAEGER_NAME=test-es-index-cleaner-with-prefix
+ JAEGER_NAME=test-es-index-cleaner-with-prefix
+ export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export DAYS=5
+ DAYS=5
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query
+ JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
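On OpenShift (is_secured=true) the report-spans job is rendered with an extra nested template wired in through gomplate's -t flag, which is how the OAuth handling gets layered onto the query API calls; on unsecured clusters params stays empty and the extra template is simply omitted:

# Secured render: the -t template is registered as an additional
# (nested) gomplate template that the main template can pull in; how
# the main template includes it is not visible in this log.
/tmp/jaeger-tests/bin/gomplate \
    -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template \
    -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template \
    -o ./02-report-spans.yaml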
+ sed 's~enabled: false~enabled: true~gi' ./01-install.yaml
+ CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ job_number=00
+ test_step=06
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ sed 's/\\/\\\\/g'
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=test-es-index-cleaner-with-prefix-curator
+ JOB_NUMBER=00
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml
+ JOB_NUMBER=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml
+ '[' true = true ']'
+ get_elasticsearch_openshift_operator_version
+ export ESO_OPERATOR_VERSION
+ '[' true = true ']'
++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
+ properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.18"}}]}'
+ '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.18"}}]}' ']'
++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.18"}}]}'
++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
+ ESO_OPERATOR_VERSION=5.8.18
++ version_ge 5.8.18 5.4
+++ echo 5.8.18 5.4
+++ sort -rV
+++ head -n 1
+++ tr ' ' '\n'
++ test 5.8.18 == 5.8.18
+ '[' -n '' ']'
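The elasticsearch-operator version gate condenses to two commands: read the OLM properties annotation off the operator pod, then pick the packaged version out of the JSON with yq:

# Condensed form of get_elasticsearch_openshift_operator_version as
# traced above: OLM stamps the operator pod with a properties
# annotation; yq filters out the package version (5.8.18 here).
properties=$(kubectl get pods -l name=elasticsearch-operator --all-namespaces \
    -o=jsonpath='{.items[0].metadata.annotations.operatorframework\.io/properties}')
ESO_OPERATOR_VERSION=$(echo "$properties" | /tmp/jaeger-tests/bin/yq e -P \
    '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version')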
+ skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov
+ '[' es-index-cleaner-autoprov '!=' _build ']'
+ cd ..
+ rm -rf es-index-cleaner-managed
+ warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ start_test es-multiinstance
+ '[' 1 -ne 1 ']'
+ test_name=es-multiinstance
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-multiinstance'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m'
Rendering files for test es-multiinstance
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-multiinstance
+ cd es-multiinstance
+ jaeger_name=instance-1
+ render_install_jaeger instance-1 production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=instance-1
+ JAEGER_NAME=instance-1
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml
+ '[' true = true ']'
+ skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance
+ '[' es-multiinstance '!=' _build ']'
+ cd ..
+ rm -rf es-rollover-upstream
+ warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m'
WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true
+ '[' true = true ']'
+ es_rollover -autoprov production_autoprovisioned
+ '[' 2 -ne 2 ']'
+ postfix=-autoprov
+ jaeger_deployment_strategy=production_autoprovisioned
+ start_test es-rollover-autoprov
+ '[' 1 -ne 1 ']'
+ test_name=es-rollover-autoprov
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-rollover-autoprov'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m'
Rendering files for test es-rollover-autoprov
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-rollover-autoprov
+ cd es-rollover-autoprov
+ cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md .
+ jaeger_name=my-jaeger
+ secured_es_connection=false
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_managed_es ']'
+ ELASTICSEARCH_URL=https://elasticsearch
+ secured_es_connection=true
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_report_spans my-jaeger true 2 00 true 02
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=00
+ ensure_reported_spans=true
+ test_step=02
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ job_number=00
+ test_step=03
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=00
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml
+ JOB_NUMBER=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ job_number=01
+ test_step=04
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=01
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml
+ JOB_NUMBER=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml
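escape_command exists because the --pattern arguments carry regex backslashes (jaeger-span-\d{4}-\d{2}-\d{2}) that must survive one more round of expansion when pasted into the check-indices job template; it simply doubles every backslash. Reconstructed from the trace:

# Plausible reconstruction of escape_command from the xtrace above:
# double each backslash so \d{4} reaches the rendered job intact.
escape_command() {
    [ $# -ne 1 ] && return 1
    command=$1
    export CMD_PARAMETERS="$(echo "$command" | sed 's/\\/\\\\/g')"
}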
+ render_report_spans my-jaeger true 2 02 true 06
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=02
+ ensure_reported_spans=true
+ test_step=06
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=02
+ JOB_NUMBER=02
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ job_number=02
+ test_step=07
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=02
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml
+ JOB_NUMBER=02
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ job_number=03
+ test_step=08
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=03
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml
+ JOB_NUMBER=03
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ job_number=04
+ test_step=09
+ escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=04
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml
+ JOB_NUMBER=04
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml
+ render_report_spans my-jaeger true 2 03 true 10
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=03
+ ensure_reported_spans=true
+ test_step=10
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=03
+ JOB_NUMBER=03
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ CRONJOB_NAME=my-jaeger-es-rollover
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml
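The 11-wait-rollover step is rendered from wait-for-cronjob-execution.yaml.template and runs the suite's Go helper (WAIT_CRONJOB_PROGRAM, exported earlier) against CRONJOB_NAME=my-jaeger-es-rollover. A purely illustrative shell stand-in for what such a wait amounts to, with the timeout and polling interval as assumptions:

# Illustrative stand-in for the wait-cronjob helper: poll until the
# CronJob records a successful run. Not the suite's actual code.
wait_for_cronjob() {
    name=$1; ns=$2
    for _ in $(seq 1 60); do
        if [ -n "$(kubectl get cronjob "$name" -n "$ns" \
                -o jsonpath='{.status.lastSuccessfulTime}')" ]; then
            return 0
        fi
        sleep 10
    done
    return 1
}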
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'','
+ job_number=05
+ test_step=11
+ escape_command ''\''--name'\'', '\''jaeger-span-000002'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-000002'\'','
++ echo ''\''--name'\'', '\''jaeger-span-000002'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=05
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml
+ JOB_NUMBER=05
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ job_number=06
+ test_step=12
+ escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=06
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml
+ JOB_NUMBER=06
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml
+ '[' true = true ']'
+ get_elasticsearch_openshift_operator_version
+ export ESO_OPERATOR_VERSION
+ '[' true = true ']'
++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
+ properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.18"}}]}'
+ '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.18"}}]}' ']'
++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.18"}}]}'
++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
+ ESO_OPERATOR_VERSION=5.8.18
++ version_ge 5.8.18 5.4
+++ echo 5.8.18 5.4
+++ tr ' ' '\n'
+++ head -n 1
+++ sort -rV
++ test 5.8.18 == 5.8.18
+ '[' -n '' ']'
+ skip_test es-rollover-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov
+ '[' es-rollover-autoprov '!=' _build ']'
+ cd ..
+ rm -rf es-rollover-managed
+ warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ skip_test es-spark-dependencies 'This test is not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=es-spark-dependencies
+ message='This test is not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ rm -rf es-spark-dependencies
+ warning 'es-spark-dependencies: This test is not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m'
WAR: es-spark-dependencies: This test is not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running elasticsearch E2E tests'
Running elasticsearch E2E tests
+ cd tests/e2e/elasticsearch/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3414875983
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 600 seconds for each step
    harness.go:372: testsuite: . has 7 tests
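When one of the seven tests fails, the whole suite does not have to be rerun: kuttl can be pointed at a single rendered test from the same _build directory (kuttl's --test flag; its exact behavior in this kuttl version is an assumption):

# Rerun only es-rollover-autoprov from the rendered build directory.
cd /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
KUBECONFIG=/tmp/kubeconfig-3414875983 \
    /tmp/jaeger-tests/bin/kubectl-kuttl test --test es-rollover-autoprov --report xml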
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN   kuttl/harness/es-from-aio-to-production
=== PAUSE kuttl/harness/es-from-aio-to-production
=== RUN   kuttl/harness/es-increasing-replicas
=== PAUSE kuttl/harness/es-increasing-replicas
=== RUN   kuttl/harness/es-index-cleaner-autoprov
=== PAUSE kuttl/harness/es-index-cleaner-autoprov
=== RUN   kuttl/harness/es-multiinstance
=== PAUSE kuttl/harness/es-multiinstance
=== RUN   kuttl/harness/es-rollover-autoprov
=== PAUSE kuttl/harness/es-rollover-autoprov
=== RUN   kuttl/harness/es-simple-prod
=== PAUSE kuttl/harness/es-simple-prod
=== CONT  kuttl/harness/artifacts
    logger.go:42: 07:04:03 | artifacts | Creating namespace: kuttl-test-dear-blowfish
    logger.go:42: 07:04:03 | artifacts | artifacts events from ns kuttl-test-dear-blowfish:
    logger.go:42: 07:04:03 | artifacts | Deleting namespace: kuttl-test-dear-blowfish
=== CONT  kuttl/harness/es-multiinstance
    logger.go:42: 07:04:09 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
    logger.go:42: 07:04:09 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
    logger.go:42: 07:04:09 | es-multiinstance | Creating namespace: kuttl-test-verified-termite
    logger.go:42: 07:04:09 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace
    logger.go:42: 07:04:09 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true]
    logger.go:42: 07:04:09 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace
    logger.go:42: 07:04:09 | es-multiinstance/1-install | starting test step 1-install
    logger.go:42: 07:04:09 | es-multiinstance/1-install | Jaeger:kuttl-test-verified-termite/instance-1 created
    logger.go:42: 07:05:05 | es-multiinstance/1-install | test step completed 1-install
    logger.go:42: 07:05:05 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace
    logger.go:42: 07:05:05 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test]
    logger.go:42: 07:05:05 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created
    logger.go:42: 07:05:05 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace
    logger.go:42: 07:05:05 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance
    logger.go:42: 07:05:05 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test]
    logger.go:42: 07:05:06 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created
    logger.go:42: 07:05:06 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000]
    logger.go:42: 07:05:57 | es-multiinstance/3-create-second-instance | assert is valid
    logger.go:42: 07:05:57 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance
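Step 3 above shows a reusable trick: kubectl-kuttl assert works standalone, outside a test run, blocking until the cluster state in the given namespace matches the assert file or the timeout expires:

# Exactly what the test step ran: wait up to 1000s for the second
# Jaeger instance to reach the state described in 01-assert.yaml.
/tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml \
    -n jaeger-e2e-multiinstance-test --timeout 1000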
    logger.go:42: 07:05:57 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets
    logger.go:42: 07:05:57 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1]
    logger.go:42: 07:05:57 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2]
    logger.go:42: 07:05:57 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0]
    logger.go:42: 07:05:57 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets
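The 4-check-secrets step pulls the same logging-es.crt field from the elasticsearch secret in both namespaces and compares the dumps. Note that, as captured here, cmp --silent secret1 secret2 || exit 0 exits 0 whether the files match or not; a variant that actually fails when the two instances share a certificate (assuming that is the intent) would be:

# Strict variant (assumed intent): two independently provisioned ES
# instances should not share the same certificate.
kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' \
    -n "$NAMESPACE" > secret1
kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' \
    -n jaeger-e2e-multiinstance-test > secret2
if cmp --silent secret1 secret2; then
    echo "ERR: certificates are identical across namespaces" >&2
    exit 1
fi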
    logger.go:42: 07:05:57 | es-multiinstance/5-delete | starting test step 5-delete
    logger.go:42: 07:05:57 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false]
    logger.go:42: 07:05:57 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted
    logger.go:42: 07:05:57 | es-multiinstance/5-delete | test step completed 5-delete
    logger.go:42: 07:05:57 | es-multiinstance | es-multiinstance events from ns kuttl-test-verified-termite:
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:17 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756d549f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz replicaset-controller
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz Binding Scheduled Successfully assigned kuttl-test-verified-termite/elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz to ip-10-0-80-231.us-east-2.compute.internal default-scheduler
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:17 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756d549f from 0 to 1 deployment-controller
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz AddedInterface Add eth0 [10.131.0.17/23] from ovn-kubernetes multus
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{elasticsearch} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" in 9.924s (9.924s including waiting). Image size: 538266543 bytes. kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" in 3.095s (3.095s including waiting). Image size: 286131829 bytes. kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{proxy} Created Created container: proxy kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{proxy} Started Started container proxy kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:38 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:43 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestverifiedtermiteinstance1-1-756dxfnrz.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:54 +0000 UTC Normal Pod instance-1-collector-5b49b6b4b-2wgkc Binding Scheduled Successfully assigned kuttl-test-verified-termite/instance-1-collector-5b49b6b4b-2wgkc to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:54 +0000 UTC Normal ReplicaSet.apps instance-1-collector-5b49b6b4b SuccessfulCreate Created pod: instance-1-collector-5b49b6b4b-2wgkc replicaset-controller
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:54 +0000 UTC Normal Deployment.apps instance-1-collector ScalingReplicaSet Scaled up replica set instance-1-collector-5b49b6b4b from 0 to 1 deployment-controller
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:54 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2 Binding Scheduled Successfully assigned kuttl-test-verified-termite/instance-1-query-7cbcb46cd-5prf2 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:54 +0000 UTC Normal ReplicaSet.apps instance-1-query-7cbcb46cd SuccessfulCreate Created pod: instance-1-query-7cbcb46cd-5prf2 replicaset-controller
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:54 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-7cbcb46cd from 0 to 1 deployment-controller
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:55 +0000 UTC Normal Pod instance-1-collector-5b49b6b4b-2wgkc AddedInterface Add eth0 [10.129.2.18/23] from ovn-kubernetes multus
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:55 +0000 UTC Normal Pod instance-1-collector-5b49b6b4b-2wgkc.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:55 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2 AddedInterface Add eth0 [10.128.2.25/23] from ovn-kubernetes multus
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:55 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:58 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" in 2.515s (2.515s including waiting). Image size: 142020742 bytes. kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:58 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:58 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:04:58 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:01 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" in 3.34s (3.34s including waiting). Image size: 438322369 bytes. kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:01 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:01 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:01 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" kubelet
    logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:02 +0000 UTC Normal Pod instance-1-collector-5b49b6b4b-2wgkc.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" in 7.508s (7.508s including waiting). Image size: 139779827 bytes. kubelet
kubelet logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:02 +0000 UTC Normal Pod instance-1-collector-5b49b6b4b-2wgkc.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:02 +0000 UTC Normal Pod instance-1-collector-5b49b6b4b-2wgkc.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:03 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" in 1.763s (1.763s including waiting). Image size: 112614125 bytes. kubelet logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:03 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:03 +0000 UTC Normal Pod instance-1-query-7cbcb46cd-5prf2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:20 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:20 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:20 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod instance-1-collector-5b49b6b4b-2wgkc horizontal-pod-autoscaler logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod instance-1-collector-5b49b6b4b-2wgkc horizontal-pod-autoscaler logger.go:42: 07:05:57 | es-multiinstance | 2025-03-10 07:05:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod instance-1-collector-5b49b6b4b-2wgkc horizontal-pod-autoscaler logger.go:42: 07:05:57 | es-multiinstance | Deleting namespace: kuttl-test-verified-termite === CONT kuttl/harness/es-simple-prod logger.go:42: 07:06:05 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 
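Note: the HorizontalPodAutoscaler warnings above start as "no metrics returned from resource metrics API" (normal while the metrics pipeline warms up) but then settle on "missing request for cpu/memory in container jaeger-collector": the HPA cannot compute percentage utilization for a container that declares no resource requests, so that form persists for the life of the instance. A minimal sketch of one way to quiet it, assuming a Jaeger CR named instance-1 and that the operator honors spec.collector.resources (the request values here are illustrative, not taken from this run):

    # Hypothetical patch: give jaeger-collector resource requests so the HPA
    # has a denominator for cpu/memory utilization.
    kubectl patch jaeger instance-1 -n kuttl-test-verified-termite --type merge -p '
    spec:
      collector:
        resources:
          requests:
            cpu: 100m
            memory: 128Mi'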
logger.go:42: 07:06:05 | es-simple-prod | Creating namespace: kuttl-test-outgoing-oryx
logger.go:42: 07:06:05 | es-simple-prod | es-simple-prod events from ns kuttl-test-outgoing-oryx:
logger.go:42: 07:06:05 | es-simple-prod | Deleting namespace: kuttl-test-outgoing-oryx
=== CONT kuttl/harness/es-rollover-autoprov
logger.go:42: 07:06:11 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:06:11 | es-rollover-autoprov | Creating namespace: kuttl-test-one-peacock
logger.go:42: 07:06:11 | es-rollover-autoprov/1-install | starting test step 1-install
logger.go:42: 07:06:11 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-one-peacock/my-jaeger created
logger.go:42: 07:06:50 | es-rollover-autoprov/1-install | test step completed 1-install
logger.go:42: 07:06:50 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 07:06:50 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:06:51 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:07:00 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 07:07:00 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 07:07:01 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 07:07:27 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 07:07:27 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices
logger.go:42: 07:07:27 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-one-peacock/00-check-indices created
logger.go:42: 07:07:32 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices
logger.go:42: 07:07:32 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices
logger.go:42: 07:07:32 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-one-peacock/01-check-indices created
logger.go:42: 07:07:40 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices
logger.go:42: 07:07:40 | es-rollover-autoprov/5-install | starting test step 5-install
logger.go:42: 07:07:40 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-one-peacock/my-jaeger updated
logger.go:42: 07:07:48 | es-rollover-autoprov/5-install | test step completed 5-install
logger.go:42: 07:07:48 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans
logger.go:42: 07:07:48 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:07:57 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml]
logger.go:42: 07:07:58 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE]
logger.go:42: 07:07:58 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created
logger.go:42: 07:08:22 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans
logger.go:42: 07:08:22 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices
logger.go:42: 07:08:22 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-one-peacock/02-check-indices created
logger.go:42: 07:08:26 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices
logger.go:42: 07:08:26 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices
logger.go:42: 07:08:26 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-one-peacock/03-check-indices created
logger.go:42: 07:08:32 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices
logger.go:42: 07:08:32 | es-rollover-autoprov/9-check-indices | starting test step 9-check-indices
logger.go:42: 07:08:32 | es-rollover-autoprov/9-check-indices | Job:kuttl-test-one-peacock/04-check-indices created
logger.go:42: 07:08:37 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices
logger.go:42: 07:08:37 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans
logger.go:42: 07:08:37 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:08:44 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml]
logger.go:42: 07:08:45 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE]
logger.go:42: 07:08:45 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created
logger.go:42: 07:09:10 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans
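Note: steps 2, 6, and 10 above all follow the same report-spans pattern: fetch a bearer token for the OAuth-protected query route with get-token.sh, render a Job manifest from report-spans.yaml.template with gomplate (parameterized through environment variables), and kubectl apply it. A condensed sketch of one such step, reusing the endpoints from the logged commands; JOB_NUMBER=04 is a hypothetical next value:

    # Render and submit one report-span Job (mirrors the logged commands).
    SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh "$NAMESPACE" my-jaeger /dev/null
    DAYS=2 ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JOB_NUMBER=04 \
      JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 \
      JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test \
      /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-04-job.yaml
    kubectl apply -f report-span-04-job.yaml -n "$NAMESPACE"
    # kuttl then asserts that job.batch/04-report-span runs to completion.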
logger.go:42: 07:09:10 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices
logger.go:42: 07:09:10 | es-rollover-autoprov/11-check-indices | running command: [go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE]
logger.go:42: 07:09:23 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:09:23Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists"
logger.go:42: 07:09:23 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:09:23Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 07:09:23 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:09:23Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully"
logger.go:42: 07:09:23 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:09:23Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob"
logger.go:42: 07:09:23 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:09:23Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:09:33 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:09:33Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:09:43 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:09:43Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:09:53 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:09:53Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:10:03 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:10:03Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:10:13 | es-rollover-autoprov/11-check-indices | time="2025-03-10T07:10:13Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after my-jaeger-es-rollover 50.04333819s"
logger.go:42: 07:10:13 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-one-peacock/05-check-indices created
logger.go:42: 07:10:18 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices
logger.go:42: 07:10:18 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices
logger.go:42: 07:10:18 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-one-peacock/06-check-indices created
logger.go:42: 07:10:22 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices
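Note: step 11 uses the wait-cronjob Go helper to block until the my-jaeger-es-rollover CronJob exists and a Job it spawns succeeds (the "No BatchV1beta1/Cronjobs were found" debug line is just the helper falling through from the deprecated batch/v1beta1 API to batch/v1). A rough bash equivalent of that wait loop, assuming batch/v1 CronJobs; the real helper additionally tracks the next scheduled run time:

    # Approximate what cmd-utils/wait-cronjob does with plain kubectl.
    cronjob=my-jaeger-es-rollover
    until kubectl get cronjob "$cronjob" -n "$NAMESPACE" >/dev/null 2>&1; do sleep 5; done
    # Wait for any succeeded Job created by the CronJob (name-prefix match).
    until kubectl get jobs -n "$NAMESPACE" \
        -o jsonpath='{range .items[?(@.status.succeeded==1)]}{.metadata.name}{"\n"}{end}' \
        | grep -q "^${cronjob}-"; do
      sleep 10
    done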
logger.go:42: 07:10:22 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-one-peacock:
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:17 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq replicaset-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq Binding Scheduled Successfully assigned kuttl-test-one-peacock/elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq to ip-10-0-80-231.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:17 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:17 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestonepeacockmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75 from 0 to 1 deployment-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq AddedInterface Add eth0 [10.131.0.18/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq.spec.containers{proxy} Created Created container: proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:31 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:36 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestonepeacockmyjaeger-1-5f6f878c75kd4gq.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
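Note: the FailedMount warning (secret "elasticsearch-metrics" not found) and the two failed readiness probes above are routine bootstrap noise: the operator creates the secret moments after the pod, the kubelet retries the mount, and the probe passes once Elasticsearch starts answering HTTP. When a run genuinely fails, filtering for Warning events is a quick way to separate this transient noise from persistent errors, e.g.:

    # Show only Warning events for the test namespace, oldest first.
    kubectl get events -n kuttl-test-one-peacock \
      --field-selector type=Warning --sort-by=.lastTimestamp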
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:47 +0000 UTC Normal Pod my-jaeger-collector-576b757b4d-t6jv4 Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-collector-576b757b4d-t6jv4 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:47 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-576b757b4d SuccessfulCreate Created pod: my-jaeger-collector-576b757b4d-t6jv4 replicaset-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:47 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-576b757b4d from 0 to 1 deployment-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:47 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-query-5547fcb99b-gzmfv to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:47 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5547fcb99b SuccessfulCreate Created pod: my-jaeger-query-5547fcb99b-gzmfv replicaset-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:47 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5547fcb99b from 0 to 1 deployment-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-collector-576b757b4d-t6jv4 AddedInterface Add eth0 [10.129.2.20/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-collector-576b757b4d-t6jv4.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-collector-576b757b4d-t6jv4.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-collector-576b757b4d-t6jv4.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:48 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:55 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:55 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:55 +0000 UTC Normal Pod my-jaeger-query-5547fcb99b-gzmfv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:55 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5547fcb99b SuccessfulDelete Deleted pod: my-jaeger-query-5547fcb99b-gzmfv replicaset-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:55 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-5547fcb99b from 1 to 0 deployment-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:56 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5 Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-query-b9b5dbc45-ckjx5 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:56 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-b9b5dbc45 SuccessfulCreate Created pod: my-jaeger-query-b9b5dbc45-ckjx5 replicaset-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:56 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-b9b5dbc45 from 0 to 1 deployment-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5 AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:06:57 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:01 +0000 UTC Normal Pod 00-report-span-t5t9p Binding Scheduled Successfully assigned kuttl-test-one-peacock/00-report-span-t5t9p to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:01 +0000 UTC Normal Pod 00-report-span-t5t9p AddedInterface Add eth0 [10.129.2.21/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:01 +0000 UTC Normal Pod 00-report-span-t5t9p.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:01 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-t5t9p job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:03 +0000 UTC Normal Pod 00-report-span-t5t9p.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.321s (2.321s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:04 +0000 UTC Normal Pod 00-report-span-t5t9p.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:04 +0000 UTC Normal Pod 00-report-span-t5t9p.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:26 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:27 +0000 UTC Normal Pod 00-check-indices-9l9l4 Binding Scheduled Successfully assigned kuttl-test-one-peacock/00-check-indices-9l9l4 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:27 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-9l9l4 job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:28 +0000 UTC Normal Pod 00-check-indices-9l9l4 AddedInterface Add eth0 [10.129.2.22/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:28 +0000 UTC Normal Pod 00-check-indices-9l9l4.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:28 +0000 UTC Normal Pod 00-check-indices-9l9l4.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 696ms (696ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:28 +0000 UTC Normal Pod 00-check-indices-9l9l4.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:28 +0000 UTC Normal Pod 00-check-indices-9l9l4.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:32 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:32 +0000 UTC Normal Pod 01-check-indices-4kptw Binding Scheduled Successfully assigned kuttl-test-one-peacock/01-check-indices-4kptw to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:32 +0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-4kptw job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod my-jaeger-collector-576b757b4d-t6jv4 horizontal-pod-autoscaler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-576b757b4d-t6jv4 horizontal-pod-autoscaler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod my-jaeger-collector-576b757b4d-t6jv4 horizontal-pod-autoscaler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:33 +0000 UTC Normal Pod 01-check-indices-4kptw AddedInterface Add eth0 [10.129.2.23/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:33 +0000 UTC Normal Pod 01-check-indices-4kptw.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:36 +0000 UTC Normal Pod 01-check-indices-4kptw.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 3.537s (3.537s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:36 +0000 UTC Normal Pod 01-check-indices-4kptw.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:36 +0000 UTC Normal Pod 01-check-indices-4kptw.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:40 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller
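Note: each NN-check-indices Job runs the jaeger-asserts image against Elasticsearch and verifies which Jaeger indices (and, after rollover is enabled in step 5, their write aliases) exist at that point in the test. A hypothetical manual spot-check, assuming a plain-HTTP in-cluster Elasticsearch service as in this suite's render defaults (ELASTICSEARCH_URL=http://elasticsearch:9200); the OpenShift-provisioned instance in this run fronts ES with TLS and a proxy, so a real check there would also need the cluster CA and client certificates:

    # Throwaway pod that lists indices and aliases; the authoritative
    # assertions live in the jaeger-asserts image.
    kubectl run es-check --rm -i --restart=Never -n "$NAMESPACE" \
      --image=registry.access.redhat.com/ubi9/ubi -- \
      bash -c 'curl -s http://elasticsearch:9200/_cat/indices?v && curl -s http://elasticsearch:9200/_cat/aliases?v'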
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:42 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-4562t Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-es-rollover-create-mapping-4562t to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:42 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-4562t AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:42 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-4562t.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:146ea9aab82c1e7dd871e269a58e0491439b4ce3b98c281e46214fb813cd8194" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:42 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-4562t job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:45 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-4562t.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:146ea9aab82c1e7dd871e269a58e0491439b4ce3b98c281e46214fb813cd8194" in 2.255s (2.255s including waiting). Image size: 104079089 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:45 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-4562t.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created container: my-jaeger-es-rollover-create-mapping kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:45 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-4562t.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:48 +0000 UTC Normal Pod my-jaeger-collector-576b757b4d-t6jv4.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-576b757b4d SuccessfulDelete Deleted pod: my-jaeger-collector-576b757b4d-t6jv4 replicaset-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:48 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-576b757b4d from 1 to 0 deployment-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:48 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:48 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:48 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:48 +0000 UTC Normal Pod my-jaeger-query-b9b5dbc45-ckjx5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-b9b5dbc45 SuccessfulDelete Deleted pod: my-jaeger-query-b9b5dbc45-ckjx5 replicaset-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:48 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-b9b5dbc45 from 1 to 0 deployment-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-collector-d84464cf-zgcbb Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-collector-d84464cf-zgcbb to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-collector-d84464cf-zgcbb AddedInterface Add eth0 [10.129.2.25/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-collector-d84464cf-zgcbb.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-collector-d84464cf-zgcbb.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-collector-d84464cf-zgcbb.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-d84464cf SuccessfulCreate Created pod: my-jaeger-collector-d84464cf-zgcbb replicaset-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-d84464cf from 0 to 1 deployment-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6 Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-query-5c64c8f6c8-np9f6 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6 AddedInterface Add eth0 [10.128.2.30/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Pod my-jaeger-query-5c64c8f6c8-np9f6.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5c64c8f6c8 SuccessfulCreate Created pod: my-jaeger-query-5c64c8f6c8-np9f6 replicaset-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:49 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5c64c8f6c8 from 0 to 1 deployment-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:58 +0000 UTC Normal Pod 02-report-span-vlq7l Binding Scheduled Successfully assigned kuttl-test-one-peacock/02-report-span-vlq7l to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:58 +0000 UTC Normal Pod 02-report-span-vlq7l AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:58 +0000 UTC Normal Pod 02-report-span-vlq7l.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:58 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-vlq7l job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:59 +0000 UTC Normal Pod 02-report-span-vlq7l.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 433ms (433ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:59 +0000 UTC Normal Pod 02-report-span-vlq7l.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:07:59 +0000 UTC Normal Pod 02-report-span-vlq7l.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026508-pkdj4 Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-es-lookback-29026508-pkdj4 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026508-pkdj4 AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026508-pkdj4.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:146ea9aab82c1e7dd871e269a58e0491439b4ce3b98c281e46214fb813cd8194" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026508-pkdj4.spec.containers{my-jaeger-es-lookback} Created Created container: my-jaeger-es-lookback kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026508-pkdj4.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-29026508 SuccessfulCreate Created pod: my-jaeger-es-lookback-29026508-pkdj4 job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-29026508 cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026508-gbzx8 Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-es-rollover-29026508-gbzx8 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026508-gbzx8 AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026508-gbzx8.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:146ea9aab82c1e7dd871e269a58e0491439b4ce3b98c281e46214fb813cd8194" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026508-gbzx8.spec.containers{my-jaeger-es-rollover} Created Created container: my-jaeger-es-rollover kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026508-gbzx8.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-29026508 SuccessfulCreate Created pod: my-jaeger-es-rollover-29026508-gbzx8 job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-29026508 cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-29026508 Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-29026508, condition: Complete cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-29026508 Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-29026508, condition: Complete cronjob-controller
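Note: the numeric suffix on these CronJob-created Jobs is the scheduled run time expressed in minutes since the Unix epoch, which is why my-jaeger-es-lookback-29026508 is followed one minute later by -29026509 and then -29026510: 29026508 × 60 = 1741590480 seconds, i.e. 2025-03-10 07:08:00 UTC, matching the event timestamps above. To decode one:

    # CronJob Job suffix -> wall-clock time (GNU date).
    date -ud @$((29026508 * 60))   # Mon Mar 10 07:08:00 UTC 2025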
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:22 +0000 UTC Normal Pod 02-check-indices-j9f5s Binding Scheduled Successfully assigned kuttl-test-one-peacock/02-check-indices-j9f5s to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:22 +0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-j9f5s job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:22 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:23 +0000 UTC Normal Pod 02-check-indices-j9f5s AddedInterface Add eth0 [10.129.2.29/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:23 +0000 UTC Normal Pod 02-check-indices-j9f5s.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:23 +0000 UTC Normal Pod 02-check-indices-j9f5s.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 517ms (517ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:24 +0000 UTC Normal Pod 02-check-indices-j9f5s.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:24 +0000 UTC Normal Pod 02-check-indices-j9f5s.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:26 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:27 +0000 UTC Normal Pod 03-check-indices-clhts Binding Scheduled Successfully assigned kuttl-test-one-peacock/03-check-indices-clhts to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:27 +0000 UTC Normal Pod 03-check-indices-clhts AddedInterface Add eth0 [10.129.2.30/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:27 +0000 UTC Normal Pod 03-check-indices-clhts.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:27 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-clhts job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:28 +0000 UTC Normal Pod 03-check-indices-clhts.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 439ms (439ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:28 +0000 UTC Normal Pod 03-check-indices-clhts.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:28 +0000 UTC Normal Pod 03-check-indices-clhts.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:31 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:32 +0000 UTC Normal Pod 04-check-indices-f57w9 Binding Scheduled Successfully assigned kuttl-test-one-peacock/04-check-indices-f57w9 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:32 +0000 UTC Normal Pod 04-check-indices-f57w9 AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:32 +0000 UTC Normal Pod 04-check-indices-f57w9.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:32 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-f57w9 job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod my-jaeger-collector-d84464cf-zgcbb horizontal-pod-autoscaler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-d84464cf-zgcbb horizontal-pod-autoscaler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod my-jaeger-collector-d84464cf-zgcbb horizontal-pod-autoscaler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:33 +0000 UTC Normal Pod 04-check-indices-f57w9.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 764ms (764ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:33 +0000 UTC Normal Pod 04-check-indices-f57w9.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:33 +0000 UTC Normal Pod 04-check-indices-f57w9.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:36 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:45 +0000 UTC Normal Pod 03-report-span-2qvhm Binding Scheduled Successfully assigned kuttl-test-one-peacock/03-report-span-2qvhm to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:45 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-2qvhm job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:46 +0000 UTC Normal Pod 03-report-span-2qvhm AddedInterface Add eth0 [10.129.2.32/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:46 +0000 UTC Normal Pod 03-report-span-2qvhm.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:46 +0000 UTC Normal Pod 03-report-span-2qvhm.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 676ms (676ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:47 +0000 UTC Normal Pod 03-report-span-2qvhm.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:08:47 +0000 UTC Normal Pod 03-report-span-2qvhm.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026509-b8br9 Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-es-lookback-29026509-b8br9 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026509-b8br9 AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026509-b8br9.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:146ea9aab82c1e7dd871e269a58e0491439b4ce3b98c281e46214fb813cd8194" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026509-b8br9.spec.containers{my-jaeger-es-lookback} Created Created container: my-jaeger-es-lookback kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026509-b8br9.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-29026509 SuccessfulCreate Created pod: my-jaeger-es-lookback-29026509-b8br9 job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-29026509 cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026509-mgwhr Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-es-rollover-29026509-mgwhr to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026509-mgwhr AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026509-mgwhr.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:146ea9aab82c1e7dd871e269a58e0491439b4ce3b98c281e46214fb813cd8194" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026509-mgwhr.spec.containers{my-jaeger-es-rollover} Created Created container: my-jaeger-es-rollover kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026509-mgwhr.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-29026509 SuccessfulCreate Created pod: my-jaeger-es-rollover-29026509-mgwhr job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-29026509 cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-29026509 Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-29026509, condition: Complete cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-29026509 Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-29026509, condition: Complete cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:09:09 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026510-mb9hk Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-es-lookback-29026510-mb9hk to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026510-mb9hk AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026510-mb9hk.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:146ea9aab82c1e7dd871e269a58e0491439b4ce3b98c281e46214fb813cd8194" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026510-mb9hk.spec.containers{my-jaeger-es-lookback} Created Created container: my-jaeger-es-lookback kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-lookback-29026510-mb9hk.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-29026510 SuccessfulCreate Created pod: my-jaeger-es-lookback-29026510-mb9hk job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-29026510 cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026510-fq8ng Binding Scheduled Successfully assigned kuttl-test-one-peacock/my-jaeger-es-rollover-29026510-fq8ng to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026510-fq8ng AddedInterface Add eth0 [10.129.2.35/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026510-fq8ng.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:146ea9aab82c1e7dd871e269a58e0491439b4ce3b98c281e46214fb813cd8194" already present on machine kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026510-fq8ng.spec.containers{my-jaeger-es-rollover} Created Created container: my-jaeger-es-rollover kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Pod my-jaeger-es-rollover-29026510-fq8ng.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-29026510 SuccessfulCreate Created pod: my-jaeger-es-rollover-29026510-fq8ng job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-29026510 cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-29026510 Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-29026510, condition: Complete cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-29026510 Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-29026510, condition: Complete cronjob-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:13 +0000 UTC Normal Pod 05-check-indices-sdw5z Binding Scheduled Successfully assigned kuttl-test-one-peacock/05-check-indices-sdw5z to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:13 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-sdw5z job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:14 +0000 UTC Normal Pod 05-check-indices-sdw5z AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:14 +0000 UTC Normal Pod 05-check-indices-sdw5z.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:14 +0000 UTC Normal Pod 05-check-indices-sdw5z.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 647ms (647ms including waiting). Image size: 60976023 bytes.
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:14 +0000 UTC Normal Pod 05-check-indices-sdw5z.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:14 +0000 UTC Normal Pod 05-check-indices-sdw5z.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:17 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:18 +0000 UTC Normal Pod 06-check-indices-c5b4v Binding Scheduled Successfully assigned kuttl-test-one-peacock/06-check-indices-c5b4v to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:18 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-c5b4v job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:19 +0000 UTC Normal Pod 06-check-indices-c5b4v AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes multus
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:19 +0000 UTC Normal Pod 06-check-indices-c5b4v.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:19 +0000 UTC Normal Pod 06-check-indices-c5b4v.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 620ms (620ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:19 +0000 UTC Normal Pod 06-check-indices-c5b4v.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:19 +0000 UTC Normal Pod 06-check-indices-c5b4v.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:10:22 | es-rollover-autoprov | 2025-03-10 07:10:22 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller
logger.go:42: 07:10:22 | es-rollover-autoprov | Deleting namespace: kuttl-test-one-peacock
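The FailedGetResourceMetric and FailedComputeMetricsReplicas warnings at the top of this event dump come from the horizontal-pod-autoscaler: the jaeger-collector container declares no CPU or memory requests, so the HPA cannot turn raw metrics into utilization percentages. A minimal sketch of a fix via the Jaeger CR's per-component resources field (the CR name and the request values here are illustrative, not taken from this run):

# Sketch: give the collector resource requests so the HPA can compute
# cpu/memory utilization. Field path per the Jaeger CRD; values are examples.
kubectl patch jaeger my-jaeger -n $NAMESPACE --type=merge \
  -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'

With requests in place, "missing request for cpu/memory" disappears and the HPA can scale the collector deployment; without them the warnings are noisy but harmless to the rollover test itself, which is why the run still passes.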
=== CONT kuttl/harness/es-increasing-replicas
logger.go:42: 07:10:30 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:10:30 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:10:30 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:10:30 | es-increasing-replicas | Creating namespace: kuttl-test-sensible-firefly
logger.go:42: 07:10:30 | es-increasing-replicas/1-install | starting test step 1-install
logger.go:42: 07:10:30 | es-increasing-replicas/1-install | Jaeger:kuttl-test-sensible-firefly/simple-prod created
logger.go:42: 07:11:08 | es-increasing-replicas/1-install | test step completed 1-install
logger.go:42: 07:11:08 | es-increasing-replicas/2-install | starting test step 2-install
logger.go:42: 07:11:08 | es-increasing-replicas/2-install | Jaeger:kuttl-test-sensible-firefly/simple-prod updated
logger.go:42: 07:11:24 | es-increasing-replicas/2-install | test step completed 2-install
logger.go:42: 07:11:24 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test
logger.go:42: 07:11:24 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 07:11:26 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:11:32 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:11:33 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:11:33 | es-increasing-replicas/3-smoke-test | job.batch/report-span created
logger.go:42: 07:11:33 | es-increasing-replicas/3-smoke-test | job.batch/check-span created
logger.go:42: 07:11:45 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test
logger.go:42: 07:11:45 | es-increasing-replicas/4-install | starting test step 4-install
logger.go:42: 07:11:45 | es-increasing-replicas/4-install | Jaeger:kuttl-test-sensible-firefly/simple-prod updated
logger.go:42: 07:11:45 | es-increasing-replicas/4-install | test step completed 4-install
logger.go:42: 07:11:45 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes
logger.go:42: 07:11:45 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE]
logger.go:42: 07:11:45 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 07:11:45 | es-increasing-replicas/5-check-es-nodes | false
logger.go:42: 07:11:45 | es-increasing-replicas/5-check-es-nodes | Error: no matches found
logger.go:42: 07:11:50 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 07:11:50 | es-increasing-replicas/5-check-es-nodes | true
logger.go:42: 07:11:50 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes
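Step 3 above is the smoke-test pattern used throughout these suites: fetch a service-account token, render tests/templates/smoke-test.yaml.template with gomplate (the collector and query endpoints arrive as environment variables), apply the rendered manifest, and let kuttl assert that the resulting report-span and check-span jobs complete. The same sequence as a standalone sketch, using the exact commands from the log; the final kubectl wait is an addition standing in for kuttl's assert step and is not part of the harness:

# Re-run of the 3-smoke-test step, assuming the repo layout shown in the log.
NAMESPACE=kuttl-test-sensible-firefly
SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh "$NAMESPACE" simple-prod /dev/null
ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest \
JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 \
JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 \
MOUNT_SECRET=e2e-test \
/tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
# Stand-in for kuttl's assert: both jobs must finish for the step to pass.
kubectl wait --for=condition=complete job/report-span job/check-span -n "$NAMESPACE" --timeout=120s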
"registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:10:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1-c555zjjd.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:10:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1-c555zjjd.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:10:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1-c555zjjd.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:10:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1-c555zjjd.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:10:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1-c555zjjd.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:10:37 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1-c55686fb7 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1-c555zjjd replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:10:37 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1-c55686fb7 from 0 to 1 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:10:50 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-1-c555zjjd.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-tmjtp Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-collector-7dbbc7487b-tmjtp to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-tmjtp AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-tmjtp.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-tmjtp.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-tmjtp.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:11:50 | 
es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-7dbbc7487b SuccessfulCreate Created pod: simple-prod-collector-7dbbc7487b-tmjtp replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-7dbbc7487b from 0 to 1 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9 Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-query-6bf645769-dnxz9 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9 AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6bf645769 SuccessfulCreate Created pod: simple-prod-query-6bf645769-dnxz9 replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:06 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6bf645769 from 0 to 1 
deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:12 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-wgmwz Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-collector-7dbbc7487b-wgmwz to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:12 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-7dbbc7487b SuccessfulCreate Created pod: simple-prod-collector-7dbbc7487b-wgmwz replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:12 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-7dbbc7487b from 1 to 2 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:12 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-query-6bf645769-jczjh to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:12 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6bf645769 SuccessfulCreate Created pod: simple-prod-query-6bf645769-jczjh replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:12 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6bf645769 from 1 to 2 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:13 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-wgmwz AddedInterface Add eth0 [10.128.2.32/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:13 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-wgmwz.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:13 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-wgmwz.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:13 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-wgmwz.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:13 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:13 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:15 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" in 2.371s (2.371s including waiting). Image size: 142020742 bytes. 
kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:15 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:15 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:16 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:20 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" in 4.69s (4.69s including waiting). Image size: 438322369 bytes. kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:20 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:20 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:20 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:23 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" in 2.27s (2.27s including waiting). Image size: 112614125 bytes. 
kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:23 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:23 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:27 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:27 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:27 +0000 UTC Normal Pod simple-prod-query-6bf645769-dnxz9.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:27 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:27 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:27 +0000 UTC Normal Pod simple-prod-query-6bf645769-jczjh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:27 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6bf645769 SuccessfulDelete Deleted pod: simple-prod-query-6bf645769-dnxz9 replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:27 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6bf645769 SuccessfulDelete Deleted pod: simple-prod-query-6bf645769-jczjh replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:27 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-6bf645769 from 2 to 0 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:28 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8 Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-query-594f4ffb5d-m9wh8 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:28 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9 Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-query-594f4ffb5d-qkkh9 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:28 +0000 UTC Normal ReplicaSet.apps simple-prod-query-594f4ffb5d SuccessfulCreate Created pod: simple-prod-query-594f4ffb5d-qkkh9 replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:28 +0000 UTC Normal ReplicaSet.apps simple-prod-query-594f4ffb5d SuccessfulCreate Created pod: simple-prod-query-594f4ffb5d-m9wh8 replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:28 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-594f4ffb5d from 0 to 2 deployment-controller logger.go:42: 07:11:50 
| es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8 AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9 AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{oauth-proxy} Pulled Container image 
"registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:29 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:33 +0000 UTC Normal Pod check-span-bw7xq Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/check-span-bw7xq to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:33 +0000 UTC Normal Pod check-span-bw7xq AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:33 +0000 UTC Normal Pod check-span-bw7xq.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:33 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-bw7xq job-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:33 +0000 UTC Normal Pod report-span-z88xk Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/report-span-z88xk to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:33 +0000 UTC Normal Pod report-span-z88xk AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:33 +0000 UTC Normal Pod report-span-z88xk.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:33 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-z88xk job-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:34 +0000 UTC Normal Pod check-span-bw7xq.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 569ms (569ms including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:34 +0000 UTC Normal Pod check-span-bw7xq.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:34 +0000 UTC Normal Pod check-span-bw7xq.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:39 +0000 UTC Normal Pod report-span-z88xk.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 5.445s (5.445s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:39 +0000 UTC Normal Pod report-span-z88xk.spec.containers{report-span} Created Created container: report-span kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:39 +0000 UTC Normal Pod report-span-z88xk.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:44 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal ReplicaSet.apps 
elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57946968ff SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57926rwc replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestsensiblefireflysimpleprod-2-57946968ff from 0 to 1 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-tmjtp.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-collector-7dbbc7487b-wgmwz.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-7dbbc7487b SuccessfulDelete Deleted pod: simple-prod-collector-7dbbc7487b-tmjtp replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-7dbbc7487b SuccessfulDelete Deleted pod: simple-prod-collector-7dbbc7487b-wgmwz replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-collector-7fb8cfd547-trkh8 Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-collector-7fb8cfd547-trkh8 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-collector-7fb8cfd547-xpfkj Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-collector-7fb8cfd547-xpfkj to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-7fb8cfd547 SuccessfulCreate Created pod: simple-prod-collector-7fb8cfd547-xpfkj replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-7fb8cfd547 SuccessfulCreate Created pod: simple-prod-collector-7fb8cfd547-trkh8 replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-7dbbc7487b from 2 to 0 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-7fb8cfd547 from 0 to 2 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-m9wh8.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 
2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-query-594f4ffb5d-qkkh9.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal ReplicaSet.apps simple-prod-query-594f4ffb5d SuccessfulDelete Deleted pod: simple-prod-query-594f4ffb5d-qkkh9 replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal ReplicaSet.apps simple-prod-query-594f4ffb5d SuccessfulDelete Deleted pod: simple-prod-query-594f4ffb5d-m9wh8 replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5 Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-query-6fdcb6f96f-2gfq5 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72 Binding Scheduled Successfully assigned kuttl-test-sensible-firefly/simple-prod-query-6fdcb6f96f-pht72 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6fdcb6f96f SuccessfulCreate Created pod: simple-prod-query-6fdcb6f96f-2gfq5 replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6fdcb6f96f SuccessfulCreate Created pod: simple-prod-query-6fdcb6f96f-pht72 replicaset-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-594f4ffb5d from 2 to 0 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:47 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6fdcb6f96f from 0 to 2 deployment-controller logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-collector-7fb8cfd547-trkh8 AddedInterface Add eth0 [10.129.2.45/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-collector-7fb8cfd547-trkh8.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-collector-7fb8cfd547-trkh8.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-collector-7fb8cfd547-trkh8.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod 
simple-prod-collector-7fb8cfd547-xpfkj AddedInterface Add eth0 [10.128.2.36/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-collector-7fb8cfd547-xpfkj.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-collector-7fb8cfd547-xpfkj.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-collector-7fb8cfd547-xpfkj.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5 AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-2gfq5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72 AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes multus logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 
logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet
logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet
logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:48 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet
logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:49 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet
logger.go:42: 07:11:50 | es-increasing-replicas | 2025-03-10 07:11:49 +0000 UTC Normal Pod simple-prod-query-6fdcb6f96f-pht72.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:11:50 | es-increasing-replicas | Deleting namespace: kuttl-test-sensible-firefly
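es-increasing-replicas scales the same simple-prod instance twice: step 2 raises the collector and query replica counts (both Deployments go from 1 to 2 at 07:11:12), and step 4 raises the Elasticsearch node count, which is why a second elasticsearch-cdm-...simpleprod-2 Deployment appears at 07:11:47 while collector and query roll onto new ReplicaSets. The first check-es-nodes.sh attempt prints false and "Error: no matches found" because the second node has not materialized yet; the script retries until it has. Its contents are not shown in this log, so the following is only a hypothetical loop in the same spirit, counting the operator-managed ES Deployments:

# Hypothetical re-implementation of the check-es-nodes.sh polling idea.
# The name prefix and expected count are illustrative, not from the real script.
NS=$1
EXPECTED=2
while true; do
  echo "Checking if the number of ES instances is the expected"
  ACTUAL=$(kubectl get deployments -n "$NS" -o name | grep -c '^deployment.apps/elasticsearch-cdm-')
  if [ "$ACTUAL" -eq "$EXPECTED" ]; then echo true; break; fi
  echo false
  sleep 5
done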
=== CONT kuttl/harness/es-index-cleaner-autoprov
logger.go:42: 07:12:26 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:12:26 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-ace-ibex
logger.go:42: 07:12:26 | es-index-cleaner-autoprov/1-install | starting test step 1-install
logger.go:42: 07:12:26 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-ace-ibex/test-es-index-cleaner-with-prefix created
logger.go:42: 07:13:04 | es-index-cleaner-autoprov/1-install | test step completed 1-install
logger.go:42: 07:13:04 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 07:13:04 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE test-es-index-cleaner-with-prefix /dev/null]
logger.go:42: 07:13:05 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:13:13 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 07:13:14 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 07:13:14 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/3-install | starting test step 3-install
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/3-install | Jaeger:kuttl-test-ace-ibex/test-es-index-cleaner-with-prefix updated
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/3-install | test step completed 3-install
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | starting test step 4-wait-es-index-cleaner
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob test-es-index-cleaner-with-prefix-es-index-cleaner --namespace $NAMESPACE]
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2025-03-10T07:13:54Z" level=debug msg="Checking if the test-es-index-cleaner-with-prefix-es-index-cleaner CronJob exists"
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2025-03-10T07:13:54Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2025-03-10T07:13:54Z" level=warning msg="The BatchV1/Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner was not found"
logger.go:42: 07:13:54 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2025-03-10T07:13:54Z" level=debug msg="Found BatchV1/Cronjobs:"
logger.go:42: 07:14:04 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2025-03-10T07:14:04Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 07:14:04 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2025-03-10T07:14:04Z" level=info msg="Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner found successfully"
logger.go:42: 07:14:04 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2025-03-10T07:14:04Z" level=debug msg="Waiting for the next scheduled job from test-es-index-cleaner-with-prefix-es-index-cleaner cronjob"
logger.go:42: 07:14:04 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2025-03-10T07:14:04Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 07:14:14 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2025-03-10T07:14:14Z" level=info msg="Job of owner test-es-index-cleaner-with-prefix-es-index-cleaner succeeded after 10.023902174s"
logger.go:42: 07:14:14 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | test step completed 4-wait-es-index-cleaner
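The wait-cronjob utility in step 4 probes the deprecated batch/v1beta1 CronJob API first, falls back to batch/v1, and then blocks until a Job owned by the named CronJob succeeds; the level=debug lines above trace exactly that sequence, with the job completing about ten seconds after the wait began. A rough kubectl-only equivalent of the final wait, offered as a sketch rather than the utility's actual code:

CRONJOB=test-es-index-cleaner-with-prefix-es-index-cleaner
NS=$NAMESPACE
# Block until the CronJob exists, then until one of its Jobs reports success.
until kubectl get cronjob "$CRONJOB" -n "$NS" >/dev/null 2>&1; do sleep 10; done
until kubectl get jobs -n "$NS" \
    -o jsonpath='{range .items[*]}{.metadata.name}{" "}{.status.succeeded}{"\n"}{end}' \
    | awk -v cj="$CRONJOB-" 'index($1, cj) == 1 && $2 >= 1 {found=1} END {exit !found}'; do
  sleep 10
done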
logger.go:42: 07:14:14 | es-index-cleaner-autoprov/5-install | Jaeger:kuttl-test-ace-ibex/test-es-index-cleaner-with-prefix updated logger.go:42: 07:14:14 | es-index-cleaner-autoprov/5-install | test step completed 5-install logger.go:42: 07:14:14 | es-index-cleaner-autoprov/6-check-indices | starting test step 6-check-indices logger.go:42: 07:14:14 | es-index-cleaner-autoprov/6-check-indices | Job:kuttl-test-ace-ibex/00-check-indices created logger.go:42: 07:14:18 | es-index-cleaner-autoprov/6-check-indices | test step completed 6-check-indices logger.go:42: 07:14:18 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-ace-ibex: logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:32 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-6575c5755c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8 replicaset-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8 Binding Scheduled Successfully assigned kuttl-test-ace-ibex/elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8 to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:32 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-6575c5755c from 0 to 1 deployment-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8 AddedInterface Add eth0 [10.131.0.20/23] from ovn-kubernetes multus logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov 
| 2025-03-10 07:12:45 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:12:50 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestaceibextestesindexcleanerw-1-65xcdx8.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87 Binding Scheduled Successfully assigned kuttl-test-ace-ibex/test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Warning Pod test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87 FailedMount MountVolume.SetUp failed for volume "test-es-index-cleaner-with-prefix-collector-tls-config-volume" : secret "test-es-index-cleaner-with-prefix-collector-headless-tls" not found kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-6584844fc7 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87 replicaset-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-6584844fc7 from 0 to 1 deployment-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp Binding Scheduled Successfully assigned kuttl-test-ace-ibex/test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp AddedInterface Add eth0 [10.128.2.37/23] from ovn-kubernetes multus logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal 
ReplicaSet.apps test-es-index-cleaner-with-prefix-query-7dfdfcd78f SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp replicaset-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:01 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-7dfdfcd78f from 0 to 1 deployment-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87 AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes multus logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:09 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:09 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:09 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:09 +0000 
UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-7dfdfcd78f SuccessfulDelete Deleted pod: test-es-index-cleaner-with-prefix-query-7dfdfcd78f-9lzbp replicaset-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:09 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled down replica set test-es-index-cleaner-with-prefix-query-7dfdfcd78f from 1 to 0 deployment-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd Binding Scheduled Successfully assigned kuttl-test-ace-ibex/test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes multus logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal 
ReplicaSet.apps test-es-index-cleaner-with-prefix-query-6cb44d5c75 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-6cb44d5c75-bt8xd replicaset-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:10 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-6cb44d5c75 from 0 to 1 deployment-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:14 +0000 UTC Normal Pod 00-report-span-5xrxp Binding Scheduled Successfully assigned kuttl-test-ace-ibex/00-report-span-5xrxp to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:14 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-5xrxp job-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:15 +0000 UTC Normal Pod 00-report-span-5xrxp AddedInterface Add eth0 [10.129.2.47/23] from ovn-kubernetes multus logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:15 +0000 UTC Normal Pod 00-report-span-5xrxp.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:15 +0000 UTC Normal Pod 00-report-span-5xrxp.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 753ms (753ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:15 +0000 UTC Normal Pod 00-report-span-5xrxp.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:15 +0000 UTC Normal Pod 00-report-span-5xrxp.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:16 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:16 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:16 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:31 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:31 +0000 UTC Warning 
HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87 horizontal-pod-autoscaler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:31 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87 horizontal-pod-autoscaler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-6584844fc7-r2q87 horizontal-pod-autoscaler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:13:53 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-29026514 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-2902651zmd96 job-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2902651zmd96 Binding Scheduled Successfully assigned kuttl-test-ace-ibex/test-es-index-cleaner-with-prefix-es-index-cleaner-2902651zmd96 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2902651zmd96 AddedInterface Add eth0 [10.129.2.48/23] from ovn-kubernetes multus logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2902651zmd96.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:3d41eed6b25f86a907a5935bd96ed634148af2b8869fd92109448b255c2d17c0" kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-29026514 cronjob-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:03 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2902651zmd96.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Successfully pulled image 
"registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:3d41eed6b25f86a907a5935bd96ed634148af2b8869fd92109448b255c2d17c0" in 2.497s (2.497s including waiting). Image size: 103733580 bytes. kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:03 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2902651zmd96.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container: test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:03 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2902651zmd96.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:06 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-29026514 Completed Job completed job-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:06 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-29026514, condition: Complete cronjob-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:14 +0000 UTC Normal Pod 00-check-indices-kk8w6 Binding Scheduled Successfully assigned kuttl-test-ace-ibex/00-check-indices-kk8w6 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:14 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-kk8w6 job-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:15 +0000 UTC Normal Pod 00-check-indices-kk8w6 AddedInterface Add eth0 [10.129.2.49/23] from ovn-kubernetes multus logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:15 +0000 UTC Normal Pod 00-check-indices-kk8w6.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:15 +0000 UTC Normal Pod 00-check-indices-kk8w6.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 552ms (552ms including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:15 +0000 UTC Normal Pod 00-check-indices-kk8w6.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:15 +0000 UTC Normal Pod 00-check-indices-kk8w6.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:14:18 | es-index-cleaner-autoprov | 2025-03-10 07:14:18 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 07:14:18 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-ace-ibex === CONT kuttl/harness/es-from-aio-to-production logger.go:42: 07:14:25 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:14:25 | es-from-aio-to-production | Creating namespace: kuttl-test-meet-lemur logger.go:42: 07:14:25 | es-from-aio-to-production/0-install | starting test step 0-install logger.go:42: 07:14:25 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-meet-lemur/my-jaeger created logger.go:42: 07:14:34 | es-from-aio-to-production/0-install | test step completed 0-install logger.go:42: 07:14:34 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:14:34 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:14:35 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
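The "missing the kubectl.kubernetes.io/last-applied-configuration annotation" warning above (also seen in the es-index-cleaner-autoprov run) is benign: it is emitted while get-token.sh evidently runs kubectl apply against a Jaeger CR that the kuttl step created imperatively, so the first apply has no recorded last-applied state to diff against and patches the annotation in automatically. A hedged sketch of the two standard ways to avoid the warning (my-jaeger.yaml is a stand-in name for the test's install manifest):

  kubectl create -f my-jaeger.yaml --save-config   # records last-applied state at creation time
  kubectl apply -f my-jaeger.yaml                  # or create the resource declaratively from the start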
logger.go:42: 07:14:42 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:14:42 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:14:42 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created logger.go:42: 07:14:42 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created logger.go:42: 07:14:56 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:14:56 | es-from-aio-to-production/3-install | starting test step 3-install logger.go:42: 07:14:56 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-meet-lemur/my-jaeger updated logger.go:42: 07:15:32 | es-from-aio-to-production/3-install | test step completed 3-install logger.go:42: 07:15:32 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test logger.go:42: 07:15:32 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:15:39 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:15:40 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:15:40 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged logger.go:42: 07:15:40 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged logger.go:42: 07:15:40 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test logger.go:42: 07:15:40 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-meet-lemur: logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:29 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp Binding Scheduled Successfully assigned kuttl-test-meet-lemur/my-jaeger-76dd4b95cb-kt7cp to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:29 +0000 UTC Warning Pod my-jaeger-76dd4b95cb-kt7cp FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : secret "my-jaeger-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:29 +0000 UTC Normal ReplicaSet.apps my-jaeger-76dd4b95cb SuccessfulCreate Created pod: my-jaeger-76dd4b95cb-kt7cp replicaset-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:29 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-76dd4b95cb from 0 to 1 deployment-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:30 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes multus logger.go:42: 07:15:40 | 
es-from-aio-to-production | 2025-03-10 07:14:30 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:33 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" in 3.289s (3.289s including waiting). Image size: 144304495 bytes. kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:33 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:33 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:33 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:34 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:34 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:37 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:37 +0000 UTC Normal Pod my-jaeger-76dd4b95cb-kt7cp.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:37 +0000 UTC Normal ReplicaSet.apps my-jaeger-76dd4b95cb SuccessfulDelete Deleted pod: my-jaeger-76dd4b95cb-kt7cp replicaset-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:37 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-76dd4b95cb from 1 to 0 deployment-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:38 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv Binding Scheduled Successfully assigned kuttl-test-meet-lemur/my-jaeger-7bf86f7f88-wpkdv to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:38 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes multus logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:38 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:38 +0000 UTC Normal ReplicaSet.apps my-jaeger-7bf86f7f88 SuccessfulCreate Created pod: my-jaeger-7bf86f7f88-wpkdv replicaset-controller 
logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:38 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7bf86f7f88 from 0 to 1 deployment-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:39 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:39 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:39 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:39 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:39 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:42 +0000 UTC Normal Pod check-span-g84sl Binding Scheduled Successfully assigned kuttl-test-meet-lemur/check-span-g84sl to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:42 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-g84sl job-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:42 +0000 UTC Normal Pod report-span-9ltzm Binding Scheduled Successfully assigned kuttl-test-meet-lemur/report-span-9ltzm to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:42 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9ltzm job-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:43 +0000 UTC Normal Pod check-span-g84sl AddedInterface Add eth0 [10.131.0.22/23] from ovn-kubernetes multus logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:43 +0000 UTC Normal Pod check-span-g84sl.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:43 +0000 UTC Normal Pod report-span-9ltzm AddedInterface Add eth0 [10.131.0.21/23] from ovn-kubernetes multus logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:43 +0000 UTC Normal Pod report-span-9ltzm.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:45 +0000 UTC Normal Pod check-span-g84sl.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.742s (1.742s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:45 +0000 UTC Normal Pod check-span-g84sl.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:45 +0000 UTC Normal Pod check-span-g84sl.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:45 +0000 UTC Normal Pod report-span-9ltzm.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.805s (1.805s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:45 +0000 UTC Normal Pod report-span-9ltzm.spec.containers{report-span} Created Created container: report-span kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:45 +0000 UTC Normal Pod report-span-9ltzm.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:14:55 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf Binding Scheduled Successfully assigned kuttl-test-meet-lemur/elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:00 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf replicaset-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:00 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc from 0 to 1 deployment-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf AddedInterface Add eth0 [10.131.0.23/23] from ovn-kubernetes multus logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf.spec.containers{proxy} Pulled Container image 
"registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:12 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:17 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:18 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmeetlemurmyjaeger-1-6865fd55dc-f5sdf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:29 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:29 +0000 UTC Normal Pod my-jaeger-7bf86f7f88-wpkdv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:29 +0000 UTC Normal Pod my-jaeger-collector-574db7897d-v7hsq Binding Scheduled Successfully assigned kuttl-test-meet-lemur/my-jaeger-collector-574db7897d-v7hsq to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:29 +0000 UTC Normal Pod my-jaeger-collector-574db7897d-v7hsq AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes multus logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:29 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-574db7897d SuccessfulCreate Created pod: my-jaeger-collector-574db7897d-v7hsq replicaset-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:29 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-574db7897d from 0 to 1 deployment-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:29 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq Binding Scheduled Successfully assigned kuttl-test-meet-lemur/my-jaeger-query-6477ff866b-qt7pq to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:29 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6477ff866b SuccessfulCreate Created pod: my-jaeger-query-6477ff866b-qt7pq replicaset-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:29 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6477ff866b from 0 to 1 deployment-controller logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-collector-574db7897d-v7hsq.spec.containers{jaeger-collector} Pulled Container 
image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-collector-574db7897d-v7hsq.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-collector-574db7897d-v7hsq.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes multus logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | 2025-03-10 07:15:30 +0000 UTC Normal Pod my-jaeger-query-6477ff866b-qt7pq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:15:40 | es-from-aio-to-production | Deleting namespace: kuttl-test-meet-lemur === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (703.71s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.00s) --- PASS: kuttl/harness/es-multiinstance (115.65s) --- PASS: kuttl/harness/es-simple-prod (6.13s) --- PASS: kuttl/harness/es-rollover-autoprov (258.73s) --- PASS: kuttl/harness/es-increasing-replicas (115.95s) --- PASS: 
kuttl/harness/es-index-cleaner-autoprov (119.20s)
--- PASS: kuttl/harness/es-from-aio-to-production (81.88s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml
time="2025-03-10T07:15:48Z" level=debug msg="Setting a new name for the test suites"
time="2025-03-10T07:15:48Z" level=debug msg="Removing 'artifacts' TestCase"
time="2025-03-10T07:15:48Z" level=debug msg="normalizing test case names"
time="2025-03-10T07:15:48Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts"
time="2025-03-10T07:15:48Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance"
time="2025-03-10T07:15:48Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod"
time="2025-03-10T07:15:48Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov"
time="2025-03-10T07:15:48Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas"
time="2025-03-10T07:15:48Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov"
time="2025-03-10T07:15:48Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+-----------------------------------------+--------+
|                  NAME                   | RESULT |
+-----------------------------------------+--------+
| elasticsearch_artifacts                 | passed |
| elasticsearch_es_multiinstance          | passed |
| elasticsearch_es_simple_prod            | passed |
| elasticsearch_es_rollover_autoprov      | passed |
| elasticsearch_es_increasing_replicas    | passed |
| elasticsearch_es_index_cleaner_autoprov | passed |
| elasticsearch_es_from_aio_to_production | passed |
+-----------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=examples
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/examples.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-examples
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
KAFKA_VERSION=3.6.0 \
SKIP_KAFKA=false \
VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \
./tests/e2e/examples/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 19m Cluster version is 4.19.0-0.nightly-2025-03-09-063419'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 19m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + '[' true '!=' true ']' + render_install_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./02-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. + mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. 
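
The render_smoke_test calls traced above (for agent-as-daemonset and my-jaeger) all follow the same shape. A minimal sketch of the helper, reconstructed from the trace; since every test in this run is secured, the unsecured branch never executes here and its defaults are assumptions:

    render_smoke_test() {
        jaeger=$1; is_secured=$2; test_step=$3
        if [ "$is_secured" = "true" ]; then
            # Secured (OpenShift) runs go through the https route on port 443.
            protocol=https://; query_port=:443
            template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
        else
            # Assumed unsecured defaults; this branch is never taken in this run.
            protocol=http://; query_port=:16686
            template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
        fi
        export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
        export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
        export JAEGER_NAME=$jaeger
        # Render the smoke-test step and its assert file for this test step number.
        /tmp/jaeger-tests/bin/gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
        /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o "./${test_step}-assert.yaml"
        unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }
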
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
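
The strategy probing seen again just above reduces to a small fallback chain: top-level spec.strategy wins for production/streaming, otherwise the agent strategy is consulted, otherwise allInOne is assumed. A sketch reconstructed from the trace, not the verbatim helper from the repository:

    get_jaeger_strategy() {
        deployment_file=$1
        # Top-level strategy wins for production/streaming deployments.
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
        if [ "$strategy" = "production" ] || [ "$strategy" = "streaming" ]; then
            echo "$strategy"
            return 0
        fi
        # Otherwise fall back to the agent strategy (e.g. DaemonSet)...
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
        if [ "$strategy" != "null" ]; then
            echo "$strategy"
            return 0
        fi
        # ...and default to allInOne when neither field is set.
        echo "allInOne"
    }

The result then selects which assert template is rendered: allinone-jaeger-assert.yaml.template for allInOne and DaemonSet cases, production-jaeger-assert.yaml.template for production, as the traces above and below show.
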
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
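
After the two yq overrides above (.spec.storage.options={} and the elasticsearch node/resource settings), the rendered 01-install.yaml amounts to roughly the following Jaeger CR. This is a sketch: the non-overridden fields come from the upstream simple-prod example and are assumed here, only the storage section is taken from the trace:

    cat <<'EOF'
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: simple-prod
    spec:
      strategy: production
      storage:
        type: elasticsearch
        options: {}
        elasticsearch:
          nodeCount: 1
          resources:
            limits:
              memory: 2Gi
    EOF
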
+ mkdir -p examples-simple-prod-with-volumes + cd examples-simple-prod-with-volumes + example_name=simple-prod-with-volumes + render_install_example simple-prod-with-volumes 01 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod-with-volumes 02 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml + start_test examples-simplest + '[' 1 -ne 1 ']' + test_name=examples-simplest + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simplest' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simplest\e[0m' Rendering files for test examples-simplest + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes + '[' examples-simple-prod-with-volumes '!=' _build ']' + cd .. + mkdir -p examples-simplest + cd examples-simplest + example_name=simplest + render_install_example simplest 00 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 01 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger + '[' 1 -ne 1 ']' + test_name=examples-with-badger + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m' Rendering files for test examples-with-badger + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest + '[' examples-simplest '!=' _build ']' + cd .. + mkdir -p examples-with-badger + cd examples-with-badger + example_name=with-badger + render_install_example with-badger 00 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + JAEGER_NAME=with-badger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger 01 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + jaeger_name=with-badger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + export JAEGER_NAME=with-badger + JAEGER_NAME=with-badger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger-and-volume + '[' 1 -ne 1 ']' + test_name=examples-with-badger-and-volume + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger-and-volume' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m' Rendering files for test examples-with-badger-and-volume + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger + '[' examples-with-badger '!=' _build ']' + cd .. + mkdir -p examples-with-badger-and-volume + cd examples-with-badger-and-volume + example_name=with-badger-and-volume + render_install_example with-badger-and-volume 00 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + JAEGER_NAME=with-badger-and-volume + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger-and-volume 01 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + jaeger_name=with-badger-and-volume + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger-and-volume true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger-and-volume + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + export JAEGER_NAME=with-badger-and-volume + JAEGER_NAME=with-badger-and-volume + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume + '[' examples-with-badger-and-volume '!=' _build ']' + cd .. 
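
The next test, examples-agent-as-daemonset below, opens with prepare_daemonset, already seen at the top of this suite: on OpenShift, agent DaemonSet tests first need the hostport SCC and a dedicated jaeger-agent service account installed. A sketch of the helper as reconstructed from the trace; the target file name is an assumption, since the output redirection is not visible in the set -x output:

    prepare_daemonset() {
        test_step=$1
        if [ "$IS_OPENSHIFT" = "true" ]; then
            # Concatenate the SCC and the service account into one install step.
            cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml > "./${test_step}-install.yaml"
            echo "---" >> "./${test_step}-install.yaml"
            cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml >> "./${test_step}-install.yaml"
        fi
    }
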
+ mkdir -p examples-agent-as-daemonset + cd examples-agent-as-daemonset + '[' true = true ']' + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml + '[' true = true ']' + start_test examples-openshift-with-htpasswd + '[' 1 -ne 1 ']' + test_name=examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-with-htpasswd' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m' Rendering files for test examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset + '[' examples-agent-as-daemonset '!=' _build ']' + cd .. + mkdir -p examples-openshift-with-htpasswd + cd examples-openshift-with-htpasswd + export JAEGER_NAME=with-htpasswd + JAEGER_NAME=with-htpasswd + export JAEGER_USERNAME=awesomeuser + JAEGER_USERNAME=awesomeuser + export JAEGER_PASSWORD=awesomepassword + JAEGER_PASSWORD=awesomepassword + export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' + JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ base64 + SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg== + /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + INSECURE=true + JAEGER_USERNAME= + JAEGER_PASSWORD= + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml + JAEGER_USERNAME=wronguser + JAEGER_PASSWORD=wrongpassword + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml + skip_test examples-agent-as-daemonset 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=examples-agent-as-daemonset + message='This test is flaky in Prow 
CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-with-htpasswd + '[' examples-openshift-with-htpasswd '!=' _build ']' + cd .. + rm -rf examples-agent-as-daemonset + warning 'examples-agent-as-daemonset: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: examples-agent-as-daemonset: This test is flaky in Prow CI\e[0m' WAR: examples-agent-as-daemonset: This test is flaky in Prow CI + skip_test examples-with-badger-and-volume 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=examples-with-badger-and-volume + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + rm -rf examples-with-badger-and-volume + warning 'examples-with-badger-and-volume: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: examples-with-badger-and-volume: This test is flaky in Prow CI\e[0m' WAR: examples-with-badger-and-volume: This test is flaky in Prow CI + skip_test examples-collector-with-priority-class 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=examples-collector-with-priority-class + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + rm -rf examples-collector-with-priority-class + warning 'examples-collector-with-priority-class: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: examples-collector-with-priority-class: This test is flaky in Prow CI\e[0m' WAR: examples-collector-with-priority-class: This test is flaky in Prow CI make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running examples E2E tests' Running examples E2E tests + cd tests/e2e/examples/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3414875983 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 11 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/examples-agent-with-priority-class === PAUSE kuttl/harness/examples-agent-with-priority-class === RUN kuttl/harness/examples-all-in-one-with-options === PAUSE kuttl/harness/examples-all-in-one-with-options === RUN kuttl/harness/examples-business-application-injected-sidecar === PAUSE kuttl/harness/examples-business-application-injected-sidecar === RUN kuttl/harness/examples-openshift-with-htpasswd === PAUSE kuttl/harness/examples-openshift-with-htpasswd === RUN kuttl/harness/examples-service-types === PAUSE kuttl/harness/examples-service-types === RUN kuttl/harness/examples-simple-prod === PAUSE kuttl/harness/examples-simple-prod === RUN kuttl/harness/examples-simple-prod-with-volumes === PAUSE kuttl/harness/examples-simple-prod-with-volumes === RUN kuttl/harness/examples-simplest === PAUSE kuttl/harness/examples-simplest === RUN kuttl/harness/examples-with-badger === PAUSE kuttl/harness/examples-with-badger === RUN kuttl/harness/examples-with-sampling === PAUSE kuttl/harness/examples-with-sampling === CONT kuttl/harness/artifacts logger.go:42: 07:16:13 | artifacts | Creating namespace: kuttl-test-fair-dinosaur logger.go:42: 07:16:13 | artifacts | artifacts events from ns kuttl-test-fair-dinosaur: logger.go:42: 07:16:13 | artifacts | Deleting namespace: kuttl-test-fair-dinosaur === CONT kuttl/harness/examples-simple-prod logger.go:42: 07:16:19 | examples-simple-prod | Creating namespace: kuttl-test-epic-burro logger.go:42: 07:16:19 | examples-simple-prod/1-install | starting test step 1-install logger.go:42: 07:16:19 | examples-simple-prod/1-install | Jaeger:kuttl-test-epic-burro/simple-prod created logger.go:42: 07:16:59 | examples-simple-prod/1-install | test step completed 1-install logger.go:42: 07:16:59 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:16:59 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 07:17:01 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
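
For reference, the kuttl-test.yaml rendered earlier from kuttl-test.yaml.template is approximately the following TestSuite. This is inferred from the harness output (600-second per-step timeout, 11 tests discovered from the suite's _build directory, the artifacts directory created during rendering), so the exact field values are assumptions:

    cat <<'EOF' > ./kuttl-test.yaml
    apiVersion: kuttl.dev/v1beta1
    kind: TestSuite
    testDirs:
      - .
    timeout: 600
    artifactsDir: ./artifacts
    EOF

The XML report requested with --report xml above is what ends up in /logs/artifacts for each suite.
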
logger.go:42: 07:17:08 | examples-simple-prod/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:17:09 | examples-simple-prod/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:17:09 | examples-simple-prod/2-smoke-test | job.batch/report-span created logger.go:42: 07:17:09 | examples-simple-prod/2-smoke-test | job.batch/check-span created logger.go:42: 07:17:22 | examples-simple-prod/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:17:22 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-epic-burro: logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg Binding Scheduled Successfully assigned kuttl-test-epic-burro/elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:26 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg replicaset-controller logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:26 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestepicburrosimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87 from 0 to 1 deployment-controller logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg AddedInterface Add eth0 [10.131.0.24/23] from ovn-kubernetes multus logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:27 +0000 UTC 
Normal Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:40 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:45 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestepicburrosimpleprod-1-69f4cc87-9qzpg.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:56 +0000 UTC Normal Pod simple-prod-collector-5c67f9df8c-z4gnc Binding Scheduled Successfully assigned kuttl-test-epic-burro/simple-prod-collector-5c67f9df8c-z4gnc to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:56 +0000 UTC Warning Pod simple-prod-collector-5c67f9df8c-z4gnc FailedMount MountVolume.SetUp failed for volume "simple-prod-collector-tls-config-volume" : secret "simple-prod-collector-headless-tls" not found kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:56 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5c67f9df8c SuccessfulCreate Created pod: simple-prod-collector-5c67f9df8c-z4gnc replicaset-controller logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:56 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5c67f9df8c from 0 to 1 deployment-controller logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:56 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg Binding Scheduled Successfully assigned kuttl-test-epic-burro/simple-prod-query-cd8966b5f-kfjsg to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:56 +0000 UTC Warning Pod simple-prod-query-cd8966b5f-kfjsg FailedMount MountVolume.SetUp failed for volume "simple-prod-ui-oauth-proxy-tls" : secret "simple-prod-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:56 +0000 UTC Normal ReplicaSet.apps simple-prod-query-cd8966b5f SuccessfulCreate Created pod: simple-prod-query-cd8966b5f-kfjsg replicaset-controller logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:56 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-cd8966b5f from 0 to 1 deployment-controller logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-collector-5c67f9df8c-z4gnc AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes multus logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-collector-5c67f9df8c-z4gnc.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-collector-5c67f9df8c-z4gnc.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod 
simple-prod-collector-5c67f9df8c-z4gnc.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes multus logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:57 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:58 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:58 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:16:58 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:04 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:04 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:04 +0000 UTC Normal Pod simple-prod-query-cd8966b5f-kfjsg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:04 +0000 UTC Normal ReplicaSet.apps simple-prod-query-cd8966b5f SuccessfulDelete Deleted pod: simple-prod-query-cd8966b5f-kfjsg replicaset-controller logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:04 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-cd8966b5f from 1 to 0 deployment-controller 
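
The 2-smoke-test step passes once both jobs created above finish: report-span posts a span to the collector endpoint and check-span polls the query endpoint until the span is visible. The equivalent manual check, using the namespace kuttl assigns to the test, would be:

    kubectl wait --for=condition=complete job/report-span -n "$NAMESPACE" --timeout=600s
    kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=600s
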
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d Binding Scheduled Successfully assigned kuttl-test-epic-burro/simple-prod-query-76b4c57757-g2n9d to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes multus
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Pod simple-prod-query-76b4c57757-g2n9d.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal ReplicaSet.apps simple-prod-query-76b4c57757 SuccessfulCreate Created pod: simple-prod-query-76b4c57757-g2n9d replicaset-controller
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:05 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-76b4c57757 from 0 to 1 deployment-controller
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:09 +0000 UTC Normal Pod check-span-9b5fz Binding Scheduled Successfully assigned kuttl-test-epic-burro/check-span-9b5fz to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:09 +0000 UTC Normal Pod check-span-9b5fz AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes multus
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:09 +0000 UTC Normal Pod check-span-9b5fz.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:09 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-9b5fz job-controller
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:09 +0000 UTC Normal Pod report-span-xtzhm Binding Scheduled Successfully assigned kuttl-test-epic-burro/report-span-xtzhm to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:09 +0000 UTC Normal Pod report-span-xtzhm AddedInterface Add eth0 [10.129.2.53/23] from ovn-kubernetes multus
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:09 +0000 UTC Normal Pod report-span-xtzhm.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:09 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-xtzhm job-controller
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:10 +0000 UTC Normal Pod check-span-9b5fz.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 741ms (741ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:10 +0000 UTC Normal Pod check-span-9b5fz.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:10 +0000 UTC Normal Pod check-span-9b5fz.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:10 +0000 UTC Normal Pod report-span-xtzhm.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 838ms (838ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:10 +0000 UTC Normal Pod report-span-xtzhm.spec.containers{report-span} Created Created container: report-span kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:10 +0000 UTC Normal Pod report-span-xtzhm.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:17:22 | examples-simple-prod | 2025-03-10 07:17:21 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:17:22 | examples-simple-prod | Deleting namespace: kuttl-test-epic-burro
=== CONT kuttl/harness/examples-with-sampling
logger.go:42: 07:17:29 | examples-with-sampling | Creating namespace: kuttl-test-moved-lynx
logger.go:42: 07:17:29 | examples-with-sampling/3- | starting test step 3-
logger.go:42: 07:27:30 | examples-with-sampling/3- | test step failed 3-
case.go:364: failed in step 3-
case.go:366: jaegers.jaegertracing.io "with-sampling" not found
logger.go:42: 07:27:30 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-moved-lynx:
logger.go:42: 07:27:30 | examples-with-sampling | Deleting namespace: kuttl-test-moved-lynx
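The examples-with-sampling failure above is a kuttl timeout rather than a crash: step 3 started at 07:17:29 and failed at 07:27:30, so the harness polled for roughly ten minutes for a Jaeger object named with-sampling that never appeared (case.go:366). A kuttl assert file for such a step would have roughly this shape; the file name and fields below are assumed for illustration, not copied from the test suite:

# hypothetical 3-assert.yaml; kuttl waits until an object with this
# apiVersion/kind/name exists and matches the listed fields, and fails
# the step when the timeout elapses first
cat <<'EOF' > 3-assert.yaml
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: with-sampling
EOF

Since the object never existed at all, the failure points at the step that should have created the with-sampling instance rather than at the assertion itself.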
=== CONT kuttl/harness/examples-with-badger
logger.go:42: 07:27:36 | examples-with-badger | Creating namespace: kuttl-test-clever-blowfish
logger.go:42: 07:27:36 | examples-with-badger/0-install | starting test step 0-install
logger.go:42: 07:27:36 | examples-with-badger/0-install | Jaeger:kuttl-test-clever-blowfish/with-badger created
logger.go:42: 07:27:43 | examples-with-badger/0-install | test step completed 0-install
logger.go:42: 07:27:43 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:27:43 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null]
logger.go:42: 07:27:44 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:27:51 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:27:51 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:27:52 | examples-with-badger/1-smoke-test | job.batch/report-span created
logger.go:42: 07:27:52 | examples-with-badger/1-smoke-test | job.batch/check-span created
logger.go:42: 07:28:04 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test
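The smoke test is rendered from a gomplate template: the variables on the command line (ASSERT_IMG, JAEGER_COLLECTOR_ENDPOINT, JAEGER_QUERY_ENDPOINT, MOUNT_SECRET) are substituted into tests/templates/smoke-test.yaml.template to produce the report-span and check-span Jobs that kubectl apply then creates. The real template is not reproduced in this log; a minimal sketch of the same mechanism, with an invented container args line, would be:

# sketch only: gomplate fills {{ .Env.* }} from exported variables;
# the quoted heredoc keeps the {{ }} markers literal in the file
cat <<'EOF' > demo-template.yaml
apiVersion: batch/v1
kind: Job
metadata:
  name: report-span
spec:
  template:
    spec:
      restartPolicy: Never
      containers:
      - name: report-span
        image: {{ .Env.ASSERT_IMG }}
        args: ["--collector", "{{ .Env.JAEGER_COLLECTOR_ENDPOINT }}"]  # args invented for the sketch
EOF
ASSERT_IMG=quay.io/example/jaeger-asserts:latest \
JAEGER_COLLECTOR_ENDPOINT=http://example-collector-headless:14268 \
gomplate -f demo-template.yaml -o demo-job.yaml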
logger.go:42: 07:28:04 | examples-with-badger | examples-with-badger events from ns kuttl-test-clever-blowfish:
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:40 +0000 UTC Normal Pod with-badger-5976f4657f-568dn Binding Scheduled Successfully assigned kuttl-test-clever-blowfish/with-badger-5976f4657f-568dn to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:40 +0000 UTC Normal ReplicaSet.apps with-badger-5976f4657f SuccessfulCreate Created pod: with-badger-5976f4657f-568dn replicaset-controller
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:40 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-5976f4657f from 0 to 1 deployment-controller
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:41 +0000 UTC Normal Pod with-badger-5976f4657f-568dn AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes multus
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:41 +0000 UTC Normal Pod with-badger-5976f4657f-568dn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:41 +0000 UTC Normal Pod with-badger-5976f4657f-568dn.spec.containers{jaeger} Created Created container: jaeger kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:41 +0000 UTC Normal Pod with-badger-5976f4657f-568dn.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:41 +0000 UTC Normal Pod with-badger-5976f4657f-568dn.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:41 +0000 UTC Normal Pod with-badger-5976f4657f-568dn.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:41 +0000 UTC Normal Pod with-badger-5976f4657f-568dn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:48 +0000 UTC Normal Pod with-badger-5976f4657f-568dn.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:48 +0000 UTC Normal Pod with-badger-5976f4657f-568dn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:48 +0000 UTC Normal ReplicaSet.apps with-badger-5976f4657f SuccessfulDelete Deleted pod: with-badger-5976f4657f-568dn replicaset-controller
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:48 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-5976f4657f from 1 to 0 deployment-controller
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:49 +0000 UTC Normal Pod with-badger-5d55d6db78-gnpm2 Binding Scheduled Successfully assigned kuttl-test-clever-blowfish/with-badger-5d55d6db78-gnpm2 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:49 +0000 UTC Normal Pod with-badger-5d55d6db78-gnpm2 AddedInterface Add eth0 [10.129.2.56/23] from ovn-kubernetes multus
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:49 +0000 UTC Normal ReplicaSet.apps with-badger-5d55d6db78 SuccessfulCreate Created pod: with-badger-5d55d6db78-gnpm2 replicaset-controller
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:49 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-5d55d6db78 from 0 to 1 deployment-controller
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:50 +0000 UTC Normal Pod with-badger-5d55d6db78-gnpm2.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:50 +0000 UTC Normal Pod with-badger-5d55d6db78-gnpm2.spec.containers{jaeger} Created Created container: jaeger kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:50 +0000 UTC Normal Pod with-badger-5d55d6db78-gnpm2.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:50 +0000 UTC Normal Pod with-badger-5d55d6db78-gnpm2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:50 +0000 UTC Normal Pod with-badger-5d55d6db78-gnpm2.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:50 +0000 UTC Normal Pod with-badger-5d55d6db78-gnpm2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:52 +0000 UTC Normal Pod check-span-z4587 Binding Scheduled Successfully assigned kuttl-test-clever-blowfish/check-span-z4587 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:52 +0000 UTC Normal Pod check-span-z4587 AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes multus
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:52 +0000 UTC Normal Pod check-span-z4587.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:52 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-z4587 job-controller
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:52 +0000 UTC Normal Pod report-span-765rm Binding Scheduled Successfully assigned kuttl-test-clever-blowfish/report-span-765rm to ip-10-0-80-231.us-east-2.compute.internal default-scheduler
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:52 +0000 UTC Normal Pod report-span-765rm AddedInterface Add eth0 [10.131.0.25/23] from ovn-kubernetes multus
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:52 +0000 UTC Normal Pod report-span-765rm.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:52 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-765rm job-controller
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:53 +0000 UTC Normal Pod check-span-z4587.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 758ms (758ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:53 +0000 UTC Normal Pod check-span-z4587.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:53 +0000 UTC Normal Pod check-span-z4587.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:54 +0000 UTC Normal Pod report-span-765rm.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.382s (1.382s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:54 +0000 UTC Normal Pod report-span-765rm.spec.containers{report-span} Created Created container: report-span kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:27:54 +0000 UTC Normal Pod report-span-765rm.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:28:04 | examples-with-badger | 2025-03-10 07:28:03 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:28:04 | examples-with-badger | Deleting namespace: kuttl-test-clever-blowfish
=== CONT kuttl/harness/examples-simplest
logger.go:42: 07:28:16 | examples-simplest | Creating namespace: kuttl-test-clever-firefly
logger.go:42: 07:28:16 | examples-simplest/0-install | starting test step 0-install
logger.go:42: 07:28:16 | examples-simplest/0-install | Jaeger:kuttl-test-clever-firefly/simplest created
logger.go:42: 07:28:22 | examples-simplest/0-install | test step completed 0-install
logger.go:42: 07:28:22 | examples-simplest/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:28:22 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 07:28:23 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
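get-token.sh itself is not shown in this log, but per its command line its job is to obtain a token for the e2e-test ServiceAccount so the smoke-test jobs can authenticate through the oauth-proxy sitting in front of the query service. On Kubernetes 1.24+ a short-lived ServiceAccount token can be minted directly; this is a sketch of the idea, not necessarily what the script actually does:

# request a token for the e2e-test ServiceAccount in the test namespace
TOKEN=$(kubectl create token e2e-test -n "$NAMESPACE")
# bearer-authenticated probe of the query API, run from inside the cluster
curl -sk -H "Authorization: Bearer $TOKEN" https://simplest-query:443/api/services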
logger.go:42: 07:28:30 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:28:30 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:28:31 | examples-simplest/1-smoke-test | job.batch/report-span created
logger.go:42: 07:28:31 | examples-simplest/1-smoke-test | job.batch/check-span created
logger.go:42: 07:28:43 | examples-simplest/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:28:43 | examples-simplest | examples-simplest events from ns kuttl-test-clever-firefly:
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:20 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8 Binding Scheduled Successfully assigned kuttl-test-clever-firefly/simplest-6659c4bcdd-qnqd8 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:20 +0000 UTC Warning Pod simplest-6659c4bcdd-qnqd8 FailedMount MountVolume.SetUp failed for volume "simplest-collector-tls-config-volume" : secret "simplest-collector-headless-tls" not found kubelet
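This FailedMount warning is a benign race: the operator creates the simplest-collector-headless-tls secret at about the same moment the kubelet first tries to mount it, and the mount succeeds on the kubelet's retry (the same pod reaches Started a second later in the events below). If the warning persisted instead, the first thing to check would be whether the secret ever materialized:

# verify the operator-managed TLS secret exists in the test namespace
kubectl get secret simplest-collector-headless-tls -n kuttl-test-clever-firefly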
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:20 +0000 UTC Normal ReplicaSet.apps simplest-6659c4bcdd SuccessfulCreate Created pod: simplest-6659c4bcdd-qnqd8 replicaset-controller
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:20 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-6659c4bcdd from 0 to 1 deployment-controller
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:21 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8 AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes multus
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:21 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:21 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8.spec.containers{jaeger} Created Created container: jaeger kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:21 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:21 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:21 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:21 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:25 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:25 +0000 UTC Normal Pod simplest-6659c4bcdd-qnqd8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:25 +0000 UTC Normal ReplicaSet.apps simplest-6659c4bcdd SuccessfulDelete Deleted pod: simplest-6659c4bcdd-qnqd8 replicaset-controller
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:25 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-6659c4bcdd from 1 to 0 deployment-controller
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:26 +0000 UTC Normal Pod simplest-766654ffd8-6hdk5 Binding Scheduled Successfully assigned kuttl-test-clever-firefly/simplest-766654ffd8-6hdk5 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:26 +0000 UTC Normal ReplicaSet.apps simplest-766654ffd8 SuccessfulCreate Created pod: simplest-766654ffd8-6hdk5 replicaset-controller
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:26 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-766654ffd8 from 0 to 1 deployment-controller
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:27 +0000 UTC Normal Pod simplest-766654ffd8-6hdk5 AddedInterface Add eth0 [10.129.2.58/23] from ovn-kubernetes multus
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:27 +0000 UTC Normal Pod simplest-766654ffd8-6hdk5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:27 +0000 UTC Normal Pod simplest-766654ffd8-6hdk5.spec.containers{jaeger} Created Created container: jaeger kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:27 +0000 UTC Normal Pod simplest-766654ffd8-6hdk5.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:27 +0000 UTC Normal Pod simplest-766654ffd8-6hdk5.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:27 +0000 UTC Normal Pod simplest-766654ffd8-6hdk5.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:27 +0000 UTC Normal Pod simplest-766654ffd8-6hdk5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:31 +0000 UTC Normal Pod check-span-bkv82 Binding Scheduled Successfully assigned kuttl-test-clever-firefly/check-span-bkv82 to ip-10-0-80-231.us-east-2.compute.internal default-scheduler
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:31 +0000 UTC Normal Pod check-span-bkv82 AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes multus
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:31 +0000 UTC Normal Pod check-span-bkv82.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:31 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-bkv82 job-controller
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:31 +0000 UTC Normal Pod report-span-tfctt Binding Scheduled Successfully assigned kuttl-test-clever-firefly/report-span-tfctt to ip-10-0-80-231.us-east-2.compute.internal default-scheduler
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:31 +0000 UTC Normal Pod report-span-tfctt AddedInterface Add eth0 [10.131.0.26/23] from ovn-kubernetes multus
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:31 +0000 UTC Normal Pod report-span-tfctt.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:31 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-tfctt job-controller
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:32 +0000 UTC Normal Pod check-span-bkv82.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 652ms (652ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:32 +0000 UTC Normal Pod check-span-bkv82.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:32 +0000 UTC Normal Pod check-span-bkv82.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:32 +0000 UTC Normal Pod report-span-tfctt.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 594ms (594ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:32 +0000 UTC Normal Pod report-span-tfctt.spec.containers{report-span} Created Created container: report-span kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:32 +0000 UTC Normal Pod report-span-tfctt.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:28:43 | examples-simplest | 2025-03-10 07:28:42 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:28:43 | examples-simplest | Deleting namespace: kuttl-test-clever-firefly
=== CONT kuttl/harness/examples-simple-prod-with-volumes
logger.go:42: 07:28:55 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
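The Ignoring message is expected: kuttl only treats files matching the numbered-step pattern as test steps, so the .template file is skipped by the harness and is instead rendered to a plain numbered .yaml beforehand (the 3-check-volume step does run below). The pattern uses Go/PCRE syntax (\d, (?:...)), so checking it with grep needs -P rather than -E where GNU grep is available; a quick demonstration:

# only the rendered name matches kuttl's step pattern
printf '%s\n' 03-check-volume.yaml 03-check-volume.yaml.template |
  grep -P '^(\d+)-(?:[^\.]+)(?:\.yaml)?$'
# prints only: 03-check-volume.yaml (the .template suffix fails the pattern)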
logger.go:42: 07:28:55 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-hardy-manatee
logger.go:42: 07:28:55 | examples-simple-prod-with-volumes/1-install | starting test step 1-install
logger.go:42: 07:28:55 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-hardy-manatee/simple-prod created
logger.go:42: 07:29:33 | examples-simple-prod-with-volumes/1-install | test step completed 1-install
logger.go:42: 07:29:33 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:29:33 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 07:29:34 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:29:41 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:29:41 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:29:41 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created
logger.go:42: 07:29:41 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created
logger.go:42: 07:29:53 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 07:29:53 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume
logger.go:42: 07:29:53 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data]
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume
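The pod-name lookup in the 3-check-volume step pipes kubectl's full YAML output through yq just to pull out .items[0].metadata.name. Since the harness already ships yq that is fine, but the same lookup can be done with kubectl alone via jsonpath; a sketch using the step's own label selectors:

# resolve the collector pod name without yq
POD=$(kubectl get pods -n "$NAMESPACE" \
  -l app=jaeger -l app.kubernetes.io/component=collector \
  -o jsonpath='{.items[0].metadata.name}')
# same volume check as the test step
kubectl exec "$POD" -n "$NAMESPACE" -- ls /usr/share/elasticsearch/data

The step passes when the exec succeeds, that is, when the collector pod really does have the volume mounted at /usr/share/elasticsearch/data.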
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-hardy-manatee:
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:00 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd55cc SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd replicaset-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd Binding Scheduled Successfully assigned kuttl-test-hardy-manatee/elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd to ip-10-0-80-231.us-east-2.compute.internal default-scheduler
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:00 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesthardymanateesimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd55cc from 0 to 1 deployment-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd AddedInterface Add eth0 [10.131.0.28/23] from ovn-kubernetes multus
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd.spec.containers{proxy} Created Created container: proxy kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:14 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesthardymanateesimpleprod-1-bb4bd5x2ngd.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:30 +0000 UTC Normal Pod simple-prod-collector-9b7979f95-446pf Binding Scheduled Successfully assigned kuttl-test-hardy-manatee/simple-prod-collector-9b7979f95-446pf to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:30 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-9b7979f95 SuccessfulCreate Created pod: simple-prod-collector-9b7979f95-446pf replicaset-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:30 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-9b7979f95 from 0 to 1 deployment-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:30 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w Binding Scheduled Successfully assigned kuttl-test-hardy-manatee/simple-prod-query-6f45bdf6d4-tqp8w to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:30 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6f45bdf6d4 SuccessfulCreate Created pod: simple-prod-query-6f45bdf6d4-tqp8w replicaset-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:30 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6f45bdf6d4 from 0 to 1 deployment-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-collector-9b7979f95-446pf AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes multus
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-collector-9b7979f95-446pf.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-collector-9b7979f95-446pf.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-collector-9b7979f95-446pf.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes multus
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:31 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:37 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh Binding Scheduled Successfully assigned kuttl-test-hardy-manatee/simple-prod-query-545c6d4b6d-2jkqh to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:37 +0000 UTC Normal ReplicaSet.apps simple-prod-query-545c6d4b6d SuccessfulCreate Created pod: simple-prod-query-545c6d4b6d-2jkqh replicaset-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:37 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:37 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:37 +0000 UTC Normal Pod simple-prod-query-6f45bdf6d4-tqp8w.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:37 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6f45bdf6d4 SuccessfulDelete Deleted pod: simple-prod-query-6f45bdf6d4-tqp8w replicaset-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:37 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-6f45bdf6d4 from 1 to 0 deployment-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:37 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-545c6d4b6d from 0 to 1 deployment-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh AddedInterface Add eth0 [10.128.2.46/23] from ovn-kubernetes multus
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:38 +0000 UTC Normal Pod simple-prod-query-545c6d4b6d-2jkqh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:41 +0000 UTC Normal Pod check-span-5m7sv Binding Scheduled Successfully assigned kuttl-test-hardy-manatee/check-span-5m7sv to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:41 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-5m7sv job-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:41 +0000 UTC Normal Pod report-span-9qchn Binding Scheduled Successfully assigned kuttl-test-hardy-manatee/report-span-9qchn to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:41 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9qchn job-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:42 +0000 UTC Normal Pod check-span-5m7sv AddedInterface Add eth0 [10.129.2.61/23] from ovn-kubernetes multus
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:42 +0000 UTC Normal Pod check-span-5m7sv.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:42 +0000 UTC Normal Pod report-span-9qchn AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes multus
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:42 +0000 UTC Normal Pod report-span-9qchn.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:42 +0000 UTC Normal Pod report-span-9qchn.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 775ms (775ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:42 +0000 UTC Normal Pod report-span-9qchn.spec.containers{report-span} Created Created container: report-span kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:42 +0000 UTC Normal Pod report-span-9qchn.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:43 +0000 UTC Normal Pod check-span-5m7sv.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 723ms (723ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:43 +0000 UTC Normal Pod check-span-5m7sv.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:43 +0000 UTC Normal Pod check-span-5m7sv.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:45 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:45 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:45 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
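The three autoscaler warnings just above recur throughout these event dumps and are transient: the operator's HPA for the collector targets CPU and memory utilization, and for the first seconds of a pod's life the resource metrics API simply has no samples to report, so the HPA cannot compute a replica count yet. If they kept repeating, the usual checks would be:

# does the HPA ever see current utilization numbers?
kubectl get hpa simple-prod-collector -n "$NAMESPACE"
# is metrics-server reporting for the collector pod at all?
kubectl top pod -n "$NAMESPACE" -l app.kubernetes.io/component=collector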
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | 2025-03-10 07:29:53 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:29:54 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-hardy-manatee
=== CONT kuttl/harness/examples-business-application-injected-sidecar
logger.go:42: 07:30:01 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-creative-reindeer
logger.go:42: 07:30:01 | examples-business-application-injected-sidecar/0-install | starting test step 0-install
logger.go:42: 07:30:01 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-creative-reindeer/myapp created
logger.go:42: 07:30:01 | examples-business-application-injected-sidecar/0-install | test step completed 0-install
logger.go:42: 07:30:01 | examples-business-application-injected-sidecar/1-install | starting test step 1-install
logger.go:42: 07:30:01 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-creative-reindeer/simplest created
logger.go:42: 07:30:15 | examples-business-application-injected-sidecar/1-install | test step completed 1-install
logger.go:42: 07:30:15 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:30:15 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 07:30:17 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
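This last-applied-configuration warning shows up after every smoke-test step in this run because the Jaeger CRs are created by the harness rather than applied declaratively, so the annotation that kubectl apply keys its three-way merge on is absent until kubectl patches it in, exactly as the message says. It is noise here, but the warning-free way to create a resource you later intend to kubectl apply is:

# record last-applied-configuration at creation time (sketch)
kubectl create --save-config -f jaeger.yaml -n "$NAMESPACE"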
logger.go:42: 07:30:24 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:30:24 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:30:24 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created
logger.go:42: 07:30:24 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created
logger.go:42: 07:30:38 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test
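What this example exercises is sidecar injection into a pre-existing business application: the events below show the myapp Deployment being rolled to a new ReplicaSet (myapp-868bccb979) whose pod starts a jaeger-agent container alongside myapp, plus the expected transient FailedMount warnings while the operator's CA configmaps are still being created. The jaeger-operator's opt-in for this is an annotation on the Deployment (or its namespace); a sketch:

# opt a deployment into jaeger-agent sidecar injection
kubectl annotate deployment myapp -n "$NAMESPACE" \
  sidecar.jaegertracing.io/inject=true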
kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:05 +0000 UTC Normal Pod myapp-868bccb979-68clc Binding Scheduled Successfully assigned kuttl-test-creative-reindeer/myapp-868bccb979-68clc to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:05 +0000 UTC Warning Pod myapp-868bccb979-68clc FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:05 +0000 UTC Warning Pod myapp-868bccb979-68clc FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:05 +0000 UTC Normal ReplicaSet.apps myapp-868bccb979 SuccessfulCreate Created pod: myapp-868bccb979-68clc replicaset-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:05 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-868bccb979 from 0 to 1 deployment-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:06 +0000 UTC Normal Pod myapp-685c5dfd64-l2hp2.spec.containers{myapp} Created Created container: myapp kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:06 +0000 UTC Normal Pod myapp-685c5dfd64-l2hp2.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:10 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh Binding Scheduled Successfully assigned kuttl-test-creative-reindeer/simplest-7d5f65d9c7-fsdkh to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:10 +0000 UTC Normal ReplicaSet.apps simplest-7d5f65d9c7 SuccessfulCreate Created pod: simplest-7d5f65d9c7-fsdkh replicaset-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:10 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-7d5f65d9c7 from 0 to 1 deployment-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:11 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh AddedInterface Add eth0 [10.128.2.48/23] from ovn-kubernetes multus logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:11 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:12 +0000 UTC Warning Pod myapp-685c5dfd64-l2hp2.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.131.0.29:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:13 +0000 UTC Normal Pod myapp-868bccb979-68clc AddedInterface Add eth0 [10.129.2.62/23] from ovn-kubernetes multus logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 
07:30:13 +0000 UTC Normal Pod myapp-868bccb979-68clc.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:13 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" in 2.575s (2.575s including waiting). Image size: 144304495 bytes. kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:13 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:13 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:13 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:14 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:14 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:16 +0000 UTC Normal Pod myapp-868bccb979-68clc.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.201s (3.201s including waiting). Image size: 282912835 bytes. 
kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:16 +0000 UTC Normal Pod myapp-868bccb979-68clc.spec.containers{myapp} Created Created container: myapp kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:16 +0000 UTC Normal Pod myapp-868bccb979-68clc.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:16 +0000 UTC Normal Pod myapp-868bccb979-68clc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:16 +0000 UTC Normal Pod myapp-868bccb979-68clc.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:16 +0000 UTC Normal Pod myapp-868bccb979-68clc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:17 +0000 UTC Normal Pod myapp-685c5dfd64-l2hp2.spec.containers{myapp} Killing Stopping container myapp kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:17 +0000 UTC Normal ReplicaSet.apps myapp-685c5dfd64 SuccessfulDelete Deleted pod: myapp-685c5dfd64-l2hp2 replicaset-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:17 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-685c5dfd64 from 1 to 0 deployment-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:22 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:22 +0000 UTC Normal Pod simplest-7d5f65d9c7-fsdkh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:22 +0000 UTC Normal ReplicaSet.apps simplest-7d5f65d9c7 SuccessfulDelete Deleted pod: simplest-7d5f65d9c7-fsdkh replicaset-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:22 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-7d5f65d9c7 from 1 to 0 deployment-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal Pod simplest-545566fc46-2xf28 Binding Scheduled Successfully assigned kuttl-test-creative-reindeer/simplest-545566fc46-2xf28 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal Pod simplest-545566fc46-2xf28 AddedInterface Add eth0 [10.128.2.49/23] from ovn-kubernetes multus logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal Pod simplest-545566fc46-2xf28.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on 
machine kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal Pod simplest-545566fc46-2xf28.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal Pod simplest-545566fc46-2xf28.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal Pod simplest-545566fc46-2xf28.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal Pod simplest-545566fc46-2xf28.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal Pod simplest-545566fc46-2xf28.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal ReplicaSet.apps simplest-545566fc46 SuccessfulCreate Created pod: simplest-545566fc46-2xf28 replicaset-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:23 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-545566fc46 from 0 to 1 deployment-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:24 +0000 UTC Normal Pod check-span-c6hrp Binding Scheduled Successfully assigned kuttl-test-creative-reindeer/check-span-c6hrp to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:24 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-c6hrp job-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:24 +0000 UTC Warning Pod myapp-868bccb979-68clc.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.129.2.62:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:24 +0000 UTC Normal Pod report-span-8htbt Binding Scheduled Successfully assigned kuttl-test-creative-reindeer/report-span-8htbt to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:24 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-8htbt job-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:25 +0000 UTC Normal Pod check-span-c6hrp AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes multus logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:25 +0000 UTC Normal Pod check-span-c6hrp.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:25 +0000 UTC Normal Pod report-span-8htbt AddedInterface Add eth0 [10.131.0.30/23] 
from ovn-kubernetes multus logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:25 +0000 UTC Normal Pod report-span-8htbt.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:25 +0000 UTC Normal Pod report-span-8htbt.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 538ms (538ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:26 +0000 UTC Normal Pod check-span-c6hrp.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 715ms (715ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:26 +0000 UTC Normal Pod check-span-c6hrp.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:26 +0000 UTC Normal Pod check-span-c6hrp.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:26 +0000 UTC Normal Pod report-span-8htbt.spec.containers{report-span} Created Created container: report-span kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:26 +0000 UTC Normal Pod report-span-8htbt.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | 2025-03-10 07:30:37 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:30:38 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-creative-reindeer === CONT kuttl/harness/examples-service-types logger.go:42: 07:30:50 | examples-service-types | Creating namespace: kuttl-test-lucky-chigger logger.go:42: 07:30:50 | examples-service-types/0-install | starting test step 0-install logger.go:42: 07:30:50 | examples-service-types/0-install | Jaeger:kuttl-test-lucky-chigger/service-types created logger.go:42: 07:30:57 | examples-service-types/0-install | test step completed 0-install logger.go:42: 07:30:57 | examples-service-types/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:30:57 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null] logger.go:42: 07:30:59 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
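The "last-applied-configuration" warning above is benign: the smoke-test step applies a change to the Jaeger resource with kubectl apply, but the resource was created without recording its applied state, so kubectl patches the annotation on the fly, exactly as the message says. A minimal sketch of the mechanism, using a hypothetical demo.yaml manifest:

  # 'kubectl create' without --save-config does not record the applied state:
  kubectl create -f demo.yaml
  # a later 'kubectl apply' prints the warning seen above and patches the
  # kubectl.kubernetes.io/last-applied-configuration annotation automatically:
  kubectl apply -f demo.yaml
  # creating with --save-config records the annotation up front and avoids it:
  kubectl create -f demo.yaml --save-config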
logger.go:42: 07:31:05 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:31:05 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:31:06 | examples-service-types/1-smoke-test | job.batch/report-span created logger.go:42: 07:31:06 | examples-service-types/1-smoke-test | job.batch/check-span created logger.go:42: 07:31:19 | examples-service-types/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:31:19 | examples-service-types/2- | starting test step 2- logger.go:42: 07:31:19 | examples-service-types/2- | test step completed 2- logger.go:42: 07:31:19 | examples-service-types | examples-service-types events from ns kuttl-test-lucky-chigger: logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:54 +0000 UTC Normal Pod service-types-7dc499bf96-25lfd Binding Scheduled Successfully assigned kuttl-test-lucky-chigger/service-types-7dc499bf96-25lfd to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:54 +0000 UTC Normal ReplicaSet.apps service-types-7dc499bf96 SuccessfulCreate Created pod: service-types-7dc499bf96-25lfd replicaset-controller logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:54 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer service-controller logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:54 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-7dc499bf96 from 0 to 1 deployment-controller logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:55 +0000 UTC Normal Pod service-types-7dc499bf96-25lfd AddedInterface Add eth0 [10.129.2.63/23] from ovn-kubernetes multus logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:55 +0000 UTC Normal Pod service-types-7dc499bf96-25lfd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:55 +0000 UTC Normal Pod service-types-7dc499bf96-25lfd.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:55 +0000 UTC Normal Pod service-types-7dc499bf96-25lfd.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:55 +0000 UTC Normal Pod service-types-7dc499bf96-25lfd.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:55 +0000 UTC Normal Pod service-types-7dc499bf96-25lfd.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:55 +0000 UTC Normal Pod 
service-types-7dc499bf96-25lfd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:57 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer service-controller logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:30:57 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer service-controller logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:00 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer service-controller logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:02 +0000 UTC Normal Pod service-types-7dc499bf96-25lfd.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:02 +0000 UTC Normal Pod service-types-7dc499bf96-25lfd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:02 +0000 UTC Normal ReplicaSet.apps service-types-7dc499bf96 SuccessfulDelete Deleted pod: service-types-7dc499bf96-25lfd replicaset-controller logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:02 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-7dc499bf96 from 1 to 0 deployment-controller logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal Pod service-types-7bd7c64d4f-6d8cl Binding Scheduled Successfully assigned kuttl-test-lucky-chigger/service-types-7bd7c64d4f-6d8cl to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal Pod service-types-7bd7c64d4f-6d8cl AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes multus logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal Pod service-types-7bd7c64d4f-6d8cl.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal Pod service-types-7bd7c64d4f-6d8cl.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal Pod service-types-7bd7c64d4f-6d8cl.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal Pod service-types-7bd7c64d4f-6d8cl.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal Pod service-types-7bd7c64d4f-6d8cl.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal Pod service-types-7bd7c64d4f-6d8cl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal ReplicaSet.apps service-types-7bd7c64d4f SuccessfulCreate Created pod: service-types-7bd7c64d4f-6d8cl 
replicaset-controller
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:03 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-7bd7c64d4f from 0 to 1 deployment-controller
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:06 +0000 UTC Normal Pod check-span-4l9hf Binding Scheduled Successfully assigned kuttl-test-lucky-chigger/check-span-4l9hf to ip-10-0-49-237.us-east-2.compute.internal default-scheduler
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:06 +0000 UTC Normal Pod check-span-4l9hf AddedInterface Add eth0 [10.129.2.64/23] from ovn-kubernetes multus
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:06 +0000 UTC Normal Pod check-span-4l9hf.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:06 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-4l9hf job-controller
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:06 +0000 UTC Normal Pod report-span-5kwl4 Binding Scheduled Successfully assigned kuttl-test-lucky-chigger/report-span-5kwl4 to ip-10-0-80-231.us-east-2.compute.internal default-scheduler
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:06 +0000 UTC Normal Pod report-span-5kwl4 AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes multus
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:06 +0000 UTC Normal Pod report-span-5kwl4.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:06 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-5kwl4 job-controller
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:07 +0000 UTC Normal Pod check-span-4l9hf.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.067s (1.067s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:07 +0000 UTC Normal Pod check-span-4l9hf.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:07 +0000 UTC Normal Pod check-span-4l9hf.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:07 +0000 UTC Normal Pod report-span-5kwl4.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 555ms (555ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:07 +0000 UTC Normal Pod report-span-5kwl4.spec.containers{report-span} Created Created container: report-span kubelet
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:07 +0000 UTC Normal Pod report-span-5kwl4.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:31:19 | examples-service-types | 2025-03-10 07:31:18 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:31:19 | examples-service-types | Deleting namespace: kuttl-test-lucky-chigger
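Every smoke test in this run follows the same pattern: gomplate renders smoke-test.yaml.template into two Jobs, report-span posts spans to JAEGER_COLLECTOR_ENDPOINT, and check-span polls JAEGER_QUERY_ENDPOINT until the reported span is queryable; kuttl then treats completion of the check-span Job as the assertion. A rough shell equivalent of the check-span side (the real logic ships in the jaeger-asserts image and authenticates with the mounted e2e-test token, which this sketch omits; the service name is hypothetical):

  # poll the Jaeger query HTTP API until the reported span shows up (sketch):
  for try in $(seq 1 30); do
    curl -ksf "https://service-types-query:443/api/traces?service=smoke-test-service" \
      | grep -q traceID && { echo "span found"; exit 0; }
    sleep 5
  done
  exit 1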
=== CONT kuttl/harness/examples-openshift-with-htpasswd
logger.go:42: 07:32:05 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:32:05 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:32:05 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-stirred-wallaby
logger.go:42: 07:32:05 | examples-openshift-with-htpasswd/0-install | starting test step 0-install
logger.go:42: 07:32:05 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-stirred-wallaby/htpasswd created
logger.go:42: 07:32:05 | examples-openshift-with-htpasswd/0-install | test step completed 0-install
logger.go:42: 07:32:05 | examples-openshift-with-htpasswd/1-install | starting test step 1-install
logger.go:42: 07:32:05 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-stirred-wallaby/with-htpasswd created
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/1-install | test step completed 1-install
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server.
Trying insecure mode logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:32:12 | examples-openshift-with-htpasswd/2-check-unsecured | HTTP response is 503. 403 expected. Waiting 10 s logger.go:42: 07:32:22 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 3/30 the https://with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:32:22 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly logger.go:42: 07:32:22 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured logger.go:42: 07:32:22 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized logger.go:42: 07:32:22 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh] logger.go:42: 07:32:22 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated logger.go:42: 07:32:22 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0 logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd] logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh] logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0 logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd] logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. 
Trying insecure mode
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-stirred-wallaby.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-stirred-wallaby:
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:09 +0000 UTC Normal Pod with-htpasswd-65c59bfcf5-rhx4k Binding Scheduled Successfully assigned kuttl-test-stirred-wallaby/with-htpasswd-65c59bfcf5-rhx4k to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:09 +0000 UTC Warning Pod with-htpasswd-65c59bfcf5-rhx4k FailedMount MountVolume.SetUp failed for volume "with-htpasswd-collector-tls-config-volume" : secret "with-htpasswd-collector-headless-tls" not found kubelet
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:09 +0000 UTC Normal ReplicaSet.apps with-htpasswd-65c59bfcf5 SuccessfulCreate Created pod: with-htpasswd-65c59bfcf5-rhx4k replicaset-controller
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:09 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-65c59bfcf5 from 0 to 1 deployment-controller
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:10 +0000 UTC Normal Pod with-htpasswd-65c59bfcf5-rhx4k AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes multus
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:10 +0000 UTC Normal Pod with-htpasswd-65c59bfcf5-rhx4k.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:10 +0000 UTC Normal Pod with-htpasswd-65c59bfcf5-rhx4k.spec.containers{jaeger} Created Created container: jaeger kubelet
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:10 +0000 UTC Normal Pod with-htpasswd-65c59bfcf5-rhx4k.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:10 +0000 UTC Normal Pod with-htpasswd-65c59bfcf5-rhx4k.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:10 +0000 UTC Normal Pod with-htpasswd-65c59bfcf5-rhx4k.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | 2025-03-10 07:32:10 +0000 UTC Normal Pod with-htpasswd-65c59bfcf5-rhx4k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:32:23 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-stirred-wallaby
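The three check steps above all go through assert-jaeger-http-code.sh, which retries a request against the route until the expected status code comes back: unauthenticated and wrongly authenticated requests must get 403 from the oauth-proxy, while the valid htpasswd credentials must get 200. A simplified sketch of that loop (the real script also handles bearer tokens and the insecure fallback seen in the log; ROUTE_HOST stands in for the value resolved by ensure-ingress-host.sh):

  URL="https://${ROUTE_HOST}/search"
  for try in $(seq 1 30); do
    code=$(curl -ksu "$JAEGER_USERNAME:$JAEGER_PASSWORD" -o /dev/null -w '%{http_code}' "$URL")
    [ "$code" = "$EXPECTED_CODE" ] && { echo "curl response asserted properly"; exit 0; }
    echo "HTTP response is $code. $EXPECTED_CODE expected. Waiting 10 s"
    sleep 10
  done
  exit 1

For the unauthenticated 2-check-unsecured step the same loop simply runs without the -u option.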
=== CONT kuttl/harness/examples-all-in-one-with-options
logger.go:42: 07:32:29 | examples-all-in-one-with-options | Creating namespace: kuttl-test-master-lamb
logger.go:42: 07:32:29 | examples-all-in-one-with-options/0-install | starting test step 0-install
logger.go:42: 07:32:29 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-master-lamb/my-jaeger created
logger.go:42: 07:32:35 | examples-all-in-one-with-options/0-install | test step completed 0-install
logger.go:42: 07:32:35 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:32:35 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:32:37 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:32:44 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:32:44 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:32:44 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created
logger.go:42: 07:32:44 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created
logger.go:42: 07:32:58 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:32:58 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-master-lamb:
logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:33 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj Binding Scheduled Successfully assigned kuttl-test-master-lamb/my-jaeger-6c9fb54cc6-6jdrj to ip-10-0-101-63.us-east-2.compute.internal default-scheduler
logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:33 +0000 UTC Normal ReplicaSet.apps my-jaeger-6c9fb54cc6 SuccessfulCreate Created pod: my-jaeger-6c9fb54cc6-6jdrj replicaset-controller
logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:33 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6c9fb54cc6 from 0 to 1 deployment-controller
logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:34 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes multus
logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:34 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet
logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:34 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj.spec.containers{jaeger} Created Created container: jaeger kubelet
logger.go:42: 07:32:58 |
examples-all-in-one-with-options | 2025-03-10 07:32:34 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:34 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:34 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:34 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:41 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:41 +0000 UTC Normal Pod my-jaeger-6c9fb54cc6-6jdrj.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:41 +0000 UTC Normal ReplicaSet.apps my-jaeger-6c9fb54cc6 SuccessfulDelete Deleted pod: my-jaeger-6c9fb54cc6-6jdrj replicaset-controller logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:41 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-6c9fb54cc6 from 1 to 0 deployment-controller logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:42 +0000 UTC Normal Pod my-jaeger-69c6949b88-2wxgv Binding Scheduled Successfully assigned kuttl-test-master-lamb/my-jaeger-69c6949b88-2wxgv to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:42 +0000 UTC Normal ReplicaSet.apps my-jaeger-69c6949b88 SuccessfulCreate Created pod: my-jaeger-69c6949b88-2wxgv replicaset-controller logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:42 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-69c6949b88 from 0 to 1 deployment-controller logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:43 +0000 UTC Normal Pod my-jaeger-69c6949b88-2wxgv AddedInterface Add eth0 [10.129.2.65/23] from ovn-kubernetes multus logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:43 +0000 UTC Normal Pod my-jaeger-69c6949b88-2wxgv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:43 +0000 UTC Normal Pod my-jaeger-69c6949b88-2wxgv.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:43 +0000 UTC Normal Pod my-jaeger-69c6949b88-2wxgv.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:43 +0000 UTC Normal Pod my-jaeger-69c6949b88-2wxgv.spec.containers{oauth-proxy} Pulled Container image 
"registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:43 +0000 UTC Normal Pod my-jaeger-69c6949b88-2wxgv.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:43 +0000 UTC Normal Pod my-jaeger-69c6949b88-2wxgv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:44 +0000 UTC Normal Pod check-span-87qt9 Binding Scheduled Successfully assigned kuttl-test-master-lamb/check-span-87qt9 to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:44 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-87qt9 job-controller logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:44 +0000 UTC Normal Pod report-span-cml9d Binding Scheduled Successfully assigned kuttl-test-master-lamb/report-span-cml9d to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:44 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-cml9d job-controller logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:45 +0000 UTC Normal Pod check-span-87qt9 AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes multus logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:45 +0000 UTC Normal Pod check-span-87qt9.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:45 +0000 UTC Normal Pod report-span-cml9d AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes multus logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:45 +0000 UTC Normal Pod report-span-cml9d.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:46 +0000 UTC Normal Pod check-span-87qt9.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 907ms (907ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:46 +0000 UTC Normal Pod check-span-87qt9.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:46 +0000 UTC Normal Pod check-span-87qt9.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:46 +0000 UTC Normal Pod report-span-cml9d.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 912ms (912ms including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:46 +0000 UTC Normal Pod report-span-cml9d.spec.containers{report-span} Created Created container: report-span kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:46 +0000 UTC Normal Pod report-span-cml9d.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:32:58 | examples-all-in-one-with-options | 2025-03-10 07:32:57 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:32:58 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-master-lamb === CONT kuttl/harness/examples-agent-with-priority-class logger.go:42: 07:33:10 | examples-agent-with-priority-class | Creating namespace: kuttl-test-warm-moth logger.go:42: 07:33:10 | examples-agent-with-priority-class/0-install | starting test step 0-install logger.go:42: 07:33:10 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 07:33:10 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-warm-moth/jaeger-agent-daemonset created logger.go:42: 07:33:10 | examples-agent-with-priority-class/0-install | test step completed 0-install logger.go:42: 07:33:10 | examples-agent-with-priority-class/1-add-policy | starting test step 1-add-policy logger.go:42: 07:33:10 | examples-agent-with-priority-class/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset] logger.go:42: 07:33:10 | examples-agent-with-priority-class/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset" logger.go:42: 07:33:10 | examples-agent-with-priority-class/1-add-policy | running command: [sh -c sleep 5] logger.go:42: 07:33:15 | examples-agent-with-priority-class/1-add-policy | test step completed 1-add-policy logger.go:42: 07:33:15 | examples-agent-with-priority-class/2-install | starting test step 2-install logger.go:42: 07:33:15 | examples-agent-with-priority-class/2-install | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 07:33:23 | examples-agent-with-priority-class/2-install | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:33:23 | examples-agent-with-priority-class/2-install | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:33:23 | examples-agent-with-priority-class/2-install | job.batch/report-span created logger.go:42: 07:33:23 | examples-agent-with-priority-class/2-install | job.batch/check-span created logger.go:42: 07:33:23 | examples-agent-with-priority-class/2-install | PriorityClass:/high-priority created logger.go:42: 07:33:24 | examples-agent-with-priority-class/2-install | Jaeger:kuttl-test-warm-moth/agent-as-daemonset updated logger.go:42: 07:33:36 | examples-agent-with-priority-class/2-install | test step completed 2-install logger.go:42: 07:33:36 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns 
kuttl-test-warm-moth: logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal Pod agent-as-daemonset-dc4b57d98-dgmp4 Binding Scheduled Successfully assigned kuttl-test-warm-moth/agent-as-daemonset-dc4b57d98-dgmp4 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal Pod agent-as-daemonset-dc4b57d98-dgmp4 AddedInterface Add eth0 [10.129.2.66/23] from ovn-kubernetes multus logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal Pod agent-as-daemonset-dc4b57d98-dgmp4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal Pod agent-as-daemonset-dc4b57d98-dgmp4.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal Pod agent-as-daemonset-dc4b57d98-dgmp4.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal Pod agent-as-daemonset-dc4b57d98-dgmp4.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal Pod agent-as-daemonset-dc4b57d98-dgmp4.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal Pod agent-as-daemonset-dc4b57d98-dgmp4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-dc4b57d98 SuccessfulCreate Created pod: agent-as-daemonset-dc4b57d98-dgmp4 replicaset-controller logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:21 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-dc4b57d98 from 0 to 1 deployment-controller logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:23 +0000 UTC Normal Pod check-span-22sx9 Binding Scheduled Successfully assigned kuttl-test-warm-moth/check-span-22sx9 to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:23 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-22sx9 job-controller logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:23 +0000 UTC Normal Pod report-span-n5lpb Binding Scheduled Successfully assigned kuttl-test-warm-moth/report-span-n5lpb to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:23 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-n5lpb job-controller logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:24 +0000 UTC Normal Pod check-span-22sx9 AddedInterface Add eth0 
[10.131.0.36/23] from ovn-kubernetes multus
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:24 +0000 UTC Normal Pod check-span-22sx9.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:24 +0000 UTC Normal Pod report-span-n5lpb AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes multus
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:24 +0000 UTC Normal Pod report-span-n5lpb.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:25 +0000 UTC Normal Pod check-span-22sx9.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 619ms (619ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:25 +0000 UTC Normal Pod check-span-22sx9.spec.containers{asserts-container} Created Created container: asserts-container kubelet
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:25 +0000 UTC Normal Pod check-span-22sx9.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:25 +0000 UTC Normal Pod report-span-n5lpb.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.138s (1.138s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:25 +0000 UTC Normal Pod report-span-n5lpb.spec.containers{report-span} Created Created container: report-span kubelet
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:25 +0000 UTC Normal Pod report-span-n5lpb.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:28 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
logger.go:42: 07:33:36 | examples-agent-with-priority-class | 2025-03-10 07:33:35 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:33:36 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-warm-moth
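The FailedCreate warning above is the notable event of this test: the agent DaemonSet requests hostPorts 5775, 5778, 6831, 6832 and 14271, which restricted-v2 and the other default SCCs refuse, and at that moment the DaemonSet's pods are not yet admitted by the daemonset-with-hostport SCC that step 0-install created and step 1-add-policy granted. The suite still records the test as passed because the smoke-test jobs complete. The general shape of such an SCC plus its grant, with illustrative field values rather than the exact manifest the test uses:

  oc apply -f - <<'EOF'
  apiVersion: security.openshift.io/v1
  kind: SecurityContextConstraints
  metadata:
    name: daemonset-with-hostport
  allowHostPorts: true            # the one capability the agent needs
  runAsUser: { type: RunAsAny }
  seLinuxContext: { type: RunAsAny }
  fsGroup: { type: RunAsAny }
  supplementalGroups: { type: RunAsAny }
  EOF
  # bind it to the ServiceAccount the DaemonSet pods run under:
  oc adm policy --namespace "$NAMESPACE" add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset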
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1055.04s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.88s)
        --- PASS: kuttl/harness/examples-simple-prod (70.11s)
        --- FAIL: kuttl/harness/examples-with-sampling (606.86s)
        --- PASS: kuttl/harness/examples-with-badger (39.57s)
        --- PASS: kuttl/harness/examples-simplest (38.86s)
        --- PASS: kuttl/harness/examples-simple-prod-with-volumes (66.08s)
        --- PASS: kuttl/harness/examples-business-application-injected-sidecar (49.19s)
        --- PASS: kuttl/harness/examples-service-types (74.56s)
        --- PASS: kuttl/harness/examples-openshift-with-htpasswd (24.66s)
        --- PASS: kuttl/harness/examples-all-in-one-with-options (40.63s)
        --- PASS: kuttl/harness/examples-agent-with-priority-class (38.48s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml
time="2025-03-10T07:33:49Z" level=debug msg="Setting a new name for the test suites"
time="2025-03-10T07:33:49Z" level=debug msg="Removing 'artifacts' TestCase"
time="2025-03-10T07:33:49Z" level=debug msg="normalizing test case names"
time="2025-03-10T07:33:49Z" level=debug msg="examples/artifacts -> examples_artifacts"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options"
time="2025-03-10T07:33:49Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class"
+---------------------------------------------------------+--------+
|                          NAME                           | RESULT |
+---------------------------------------------------------+--------+
| examples_artifacts                                      | passed |
| examples_examples_simple_prod                           | passed |
| examples_examples_with_sampling                         | failed |
| examples_examples_with_badger                           | passed |
| examples_examples_simplest                              | passed |
| examples_examples_simple_prod_with_volumes              | passed |
| examples_examples_business_application_injected_sidecar | passed |
| examples_examples_service_types                         | passed |
| examples_examples_openshift_with_htpasswd               | passed |
| examples_examples_all_in_one_with_options               | passed |
| examples_examples_agent_with_priority_class             | passed |
+---------------------------------------------------------+--------+
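The examples suite fails overall (examples-with-sampling), yet the run continues to the next suite: run-e2e-test-suite.sh wraps the kuttl invocation in set +e, stores the exit status, re-enables -e, and converts kuttl's kuttl-report.xml into a per-suite JUnit file with junitcli before the stored status is acted on. The pattern, reconstructed from the trace above:

  set +e                    # a failing suite must not abort the whole run
  kubectl-kuttl test --report xml
  exit_code=$?              # remember the outcome (1 here)
  set -e
  go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
  junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml
  # exit_code is evaluated only after the JUnit artifact has been written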
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=generate
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/generate.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-generate
make[2]: Entering directory '/tmp/jaeger-tests'
test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.32.0/operator-sdk_`go env GOOS`_`go env GOARCH`
./hack/install/install-golangci-lint.sh
Installing golangci-lint
Try 0... go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.2
./hack/install/install-goimports.sh
Installing goimports
Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12
>>>> Formatting code...
./.ci/format.sh
>>>> Building...
./hack/install/install-dependencies.sh
Installing go dependencies
Try 0... go mod download
GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.65.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2025-03-10T07:33:55Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.65.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultAgent="1.62.0"" -o "bin/jaeger-operator" main.go
JAEGER_VERSION="1.65.0" ./tests/e2e/generate/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 37m Cluster version is 4.19.0-0.nightly-2025-03-09-063419'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 37m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/generate ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 2 -ne 2 ']' + test_name=generate + message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/generate/_build + '[' _build '!=' _build ']' + rm -rf generate + warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m' WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running generate E2E tests' Running generate E2E tests + cd tests/e2e/generate/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3414875983 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 1 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === CONT kuttl/harness/artifacts logger.go:42: 07:34:14 | artifacts | Creating namespace: kuttl-test-learning-sunbird logger.go:42: 07:34:14 | artifacts | artifacts events from ns kuttl-test-learning-sunbird: logger.go:42: 07:34:14 | artifacts | Deleting namespace: kuttl-test-learning-sunbird === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (6.13s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.99s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml time="2025-03-10T07:34:20Z" level=debug msg="Setting a new name for the test suites" time="2025-03-10T07:34:20Z" level=debug msg="Removing 'artifacts' TestCase" time="2025-03-10T07:34:20Z" level=debug msg="normalizing test case names" time="2025-03-10T07:34:20Z" level=debug msg="generate/artifacts -> generate_artifacts"
+--------------------+--------+
|        NAME        | RESULT |
+--------------------+--------+
| generate_artifacts | passed |
+--------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true + '[' 3 -ne 3 ']' + test_suite_name=miscellaneous + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/miscellaneous.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-miscellaneous make[2]: Entering directory '/tmp/jaeger-tests' SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 37m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 37m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/miscellaneous/render.sh ++ export SUITE_DIR=./tests/e2e/miscellaneous ++ SUITE_DIR=./tests/e2e/miscellaneous ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test collector-autoscale + '[' 1 -ne 1 ']' + test_name=collector-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m' Rendering files for test collector-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p collector-autoscale + cd collector-autoscale + jaeger_name=simple-prod + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + ELASTICSEARCH_NODECOUNT=1 + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml + version_lt 1.30 1.23 ++ echo 1.30 1.23 ++ tr ' ' '\n' ++ sort -rV ++ head -n 1 + test 1.30 '!=' 1.30 + rm ./03-assert.yaml + generate_otlp_e2e_tests http + test_protocol=http + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m' Rendering files for test collector-otlp-allinone-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale + '[' collector-autoscale '!=' _build ']' + cd .. 
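
The collector-autoscale render step above patches the generated Jaeger CR in place with yq (autoscale=true, minReplicas=1, maxReplicas=2, plus a collector memory request) and gates the legacy 03-assert step on a sort -V version comparison. The helper bodies are not printed by the xtrace, but they can be reconstructed from it; the following is a sketch inferred from the trace, not the repository source:

    # version_le A B: succeeds when A <= B in version order.
    # sort -V puts the smaller version first, so A <= B iff A is the head.
    version_le() {
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }

    # version_lt A B: succeeds when A < B.
    # sort -rV puts the larger version first, so A < B iff the head is not A.
    version_lt() {
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" != "$1"
    }

Against the values traced above, version_le 3.6.0 0.25.0 fails (hence KAFKA_USE_CUSTOM_PODSET=true earlier in the render) and version_lt 1.30 1.23 fails, after which the fallback ./03-assert.yaml rendered for older clusters is removed.
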
+ mkdir -p collector-otlp-allinone-http + cd collector-otlp-allinone-http + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger http true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m' Rendering files for test collector-otlp-production-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http + '[' collector-otlp-allinone-http '!=' _build ']' + cd .. 
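
In render_otlp_smoke_test, traced above, the reporting port follows the protocol argument and the query endpoint follows is_secured, which is forced to true on OpenShift because the query service sits behind the oauth-proxy. A sketch of that selection, inferred from the trace (the function source itself is not in the log):

    if [ "$reporting_protocol" = "grpc" ]; then
      reporting_port=":4317"    # default OTLP/gRPC port
    else
      reporting_port=":4318"    # default OTLP/HTTP port
    fi
    export OTEL_EXPORTER_OTLP_ENDPOINT="http://${jaeger}-collector-headless${reporting_port}"
    export JAEGER_QUERY_ENDPOINT="https://${jaeger}-query:443"    # is_secured=true -> https:// and :443

Targeting the headless collector service lets the in-cluster reporter resolve the collector pods directly rather than a single cluster IP, which is the usual reason to prefer it for gRPC clients.
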
+ mkdir -p collector-otlp-production-http + cd collector-otlp-production-http + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger http true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + generate_otlp_e2e_tests grpc + test_protocol=grpc + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m' Rendering files for test collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http + '[' collector-otlp-production-http '!=' _build ']' + cd .. 
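
render_install_jaeger, whose trace repeats for each test above, is a dispatcher: it compares deploy_mode against the known modes and renders a matching install/assert template pair with gomplate. A condensed sketch (template paths taken from the trace; the case form stands in for the if-chain the xtrace shows):

    render_install_jaeger() {
      export JAEGER_NAME=$1; deploy_mode=$2; test_step=$3
      tpl=/tmp/jaeger-tests/tests/templates
      case "$deploy_mode" in
        allInOne)
          install=$tpl/allinone-jaeger-install.yaml.template
          assert=$tpl/allinone-jaeger-assert.yaml.template ;;
        production_autoprovisioned)
          install=$tpl/openshift/production-jaeger-autoprovisioned-install.yaml.template
          assert=$tpl/production-jaeger-assert.yaml.template ;;
      esac
      /tmp/jaeger-tests/bin/gomplate -f "$install" -o "./${test_step}-install.yaml"
      /tmp/jaeger-tests/bin/gomplate -f "$assert" -o "./${test_step}-assert.yaml"
    }
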
+ mkdir -p collector-otlp-allinone-grpc + cd collector-otlp-allinone-grpc + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m' Rendering files for test collector-otlp-production-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc + '[' collector-otlp-allinone-grpc '!=' _build ']' + cd .. 
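
For the gRPC variants the rendered report-span job has to use a real OTLP client, since the collector's :4317 listener speaks protobuf over HTTP/2. The :4318 OTLP/HTTP listener, by contrast, can be exercised by hand, which is useful when debugging a failed smoke test; the payload below is a minimal hand-written OTLP/JSON example, not something taken from the test templates:

    curl -sf -X POST "http://my-jaeger-collector-headless:4318/v1/traces" \
      -H "Content-Type: application/json" \
      -d '{"resourceSpans":[{"resource":{"attributes":[{"key":"service.name","value":{"stringValue":"smoke"}}]},
           "scopeSpans":[{"spans":[{"traceId":"5b8aa5a2d2c872e8321cf37308d69df2","spanId":"051581bf3cb55c13",
           "name":"smoke","kind":1,"startTimeUnixNano":"1741592000000000000","endTimeUnixNano":"1741592001000000000"}]}]}]}'
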
+ mkdir -p collector-otlp-production-grpc + cd collector-otlp-production-grpc + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + '[' true = true ']' + skip_test istio 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=istio + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc + '[' collector-otlp-production-grpc '!=' _build ']' + cd .. 
+ rm -rf istio + warning 'istio: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m' WAR: istio: Test not supported in OpenShift + '[' true = true ']' + skip_test outside-cluster 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=outside-cluster + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf outside-cluster + warning 'outside-cluster: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m' WAR: outside-cluster: Test not supported in OpenShift + start_test set-custom-img + '[' 1 -ne 1 ']' + test_name=set-custom-img + echo =========================================================================== =========================================================================== + info 'Rendering files for test set-custom-img' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test set-custom-img\e[0m' Rendering files for test set-custom-img + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p set-custom-img + cd set-custom-img + jaeger_name=my-jaeger + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml + '[' true = true ']' + skip_test non-cluster-wide 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=non-cluster-wide + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img + '[' set-custom-img '!=' _build ']' + cd .. + rm -rf non-cluster-wide + warning 'non-cluster-wide: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m' WAR: non-cluster-wide: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running miscellaneous E2E tests' Running miscellaneous E2E tests + cd tests/e2e/miscellaneous/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3414875983 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
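
The generate, istio, outside-cluster, and (below) non-cluster-wide tests are all pruned the same way: skip_test deletes the rendered test directory from _build, so kuttl never sees it, and logs the yellow WAR line. Reconstructed from the xtrace (argument checking and the warning helper abbreviated; a sketch, not the repo source):

    skip_test() {
      test_name=$1
      message=$2
      # hop back up to _build if a previous test left us inside its directory
      [ "$(basename "$(pwd)")" != "_build" ] && cd ..
      rm -rf "$test_name"              # kuttl only runs directories that exist
      warning "$test_name: $message"   # prints the "WAR: ..." lines seen above
    }
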
harness.go:275: Successful connection to cluster at: https://api.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 8 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/cassandra-spark === PAUSE kuttl/harness/cassandra-spark === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 07:34:31 | artifacts | Creating namespace: kuttl-test-outgoing-ewe logger.go:42: 07:34:32 | artifacts | artifacts events from ns kuttl-test-outgoing-ewe: logger.go:42: 07:34:32 | artifacts | Deleting namespace: kuttl-test-outgoing-ewe === CONT kuttl/harness/collector-otlp-allinone-http logger.go:42: 07:34:37 | collector-otlp-allinone-http | Creating namespace: kuttl-test-still-tiger logger.go:42: 07:34:37 | collector-otlp-allinone-http/0-install | starting test step 0-install logger.go:42: 07:34:38 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-still-tiger/my-jaeger created logger.go:42: 07:34:44 | collector-otlp-allinone-http/0-install | test step completed 0-install logger.go:42: 07:34:44 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:34:44 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:34:45 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
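
The "missing last-applied-configuration" warning above is expected noise rather than a failure: kuttl creates the Jaeger CR with plain create semantics, so the first kubectl apply issued from the get-token.sh step has nothing to three-way merge against and patches the annotation in itself. Creating with --save-config would record it up front, for example:

    kubectl create -f 00-install.yaml --save-config   # stores kubectl.kubernetes.io/last-applied-configuration
    kubectl apply -f 00-install.yaml                  # later applies can now merge without the warning
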
logger.go:42: 07:34:52 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:34:52 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:34:52 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created logger.go:42: 07:34:52 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created logger.go:42: 07:35:05 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:35:05 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-still-tiger: logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6 Binding Scheduled Successfully assigned kuttl-test-still-tiger/my-jaeger-6cc654766d-rxfw6 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6 AddedInterface Add eth0 [10.129.2.67/23] from ovn-kubernetes multus logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal ReplicaSet.apps my-jaeger-6cc654766d SuccessfulCreate Created pod: my-jaeger-6cc654766d-rxfw6 replicaset-controller logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:42 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6cc654766d from 0 to 1 deployment-controller logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:48 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 
07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:48 +0000 UTC Normal Pod my-jaeger-6cc654766d-rxfw6.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-6cc654766d SuccessfulDelete Deleted pod: my-jaeger-6cc654766d-rxfw6 replicaset-controller logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:48 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-6cc654766d from 1 to 0 deployment-controller logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:49 +0000 UTC Normal Pod my-jaeger-746cb6458c-hpd9p Binding Scheduled Successfully assigned kuttl-test-still-tiger/my-jaeger-746cb6458c-hpd9p to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-746cb6458c SuccessfulCreate Created pod: my-jaeger-746cb6458c-hpd9p replicaset-controller logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:49 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-746cb6458c from 0 to 1 deployment-controller logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:50 +0000 UTC Normal Pod my-jaeger-746cb6458c-hpd9p AddedInterface Add eth0 [10.129.2.68/23] from ovn-kubernetes multus logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:50 +0000 UTC Normal Pod my-jaeger-746cb6458c-hpd9p.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:50 +0000 UTC Normal Pod my-jaeger-746cb6458c-hpd9p.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:50 +0000 UTC Normal Pod my-jaeger-746cb6458c-hpd9p.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:50 +0000 UTC Normal Pod my-jaeger-746cb6458c-hpd9p.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:50 +0000 UTC Normal Pod my-jaeger-746cb6458c-hpd9p.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:50 +0000 UTC Normal Pod my-jaeger-746cb6458c-hpd9p.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:52 +0000 UTC Normal Pod check-span-m6q8z Binding Scheduled Successfully assigned kuttl-test-still-tiger/check-span-m6q8z to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:52 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-m6q8z job-controller logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:52 +0000 UTC Normal Pod report-span-jljml Binding Scheduled Successfully assigned 
kuttl-test-still-tiger/report-span-jljml to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:52 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-jljml job-controller logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:53 +0000 UTC Normal Pod check-span-m6q8z AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes multus logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:53 +0000 UTC Normal Pod check-span-m6q8z.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:53 +0000 UTC Normal Pod report-span-jljml AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes multus logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:53 +0000 UTC Normal Pod report-span-jljml.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:54 +0000 UTC Normal Pod check-span-m6q8z.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 630ms (630ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:54 +0000 UTC Normal Pod check-span-m6q8z.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:54 +0000 UTC Normal Pod check-span-m6q8z.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:54 +0000 UTC Normal Pod report-span-jljml.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 701ms (701ms including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:54 +0000 UTC Normal Pod report-span-jljml.spec.containers{report-span} Created Created container: report-span kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:34:54 +0000 UTC Normal Pod report-span-jljml.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:35:05 | collector-otlp-allinone-http | 2025-03-10 07:35:04 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:35:05 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-still-tiger === CONT kuttl/harness/set-custom-img logger.go:42: 07:35:16 | set-custom-img | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:35:16 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:35:16 | set-custom-img | Creating namespace: kuttl-test-mighty-phoenix logger.go:42: 07:35:16 | set-custom-img/1-install | starting test step 1-install logger.go:42: 07:35:17 | set-custom-img/1-install | Jaeger:kuttl-test-mighty-phoenix/my-jaeger created logger.go:42: 07:35:55 | set-custom-img/1-install | test step completed 1-install logger.go:42: 07:35:55 | set-custom-img/2-install | starting test step 2-install logger.go:42: 07:35:55 | set-custom-img/2-install | Jaeger:kuttl-test-mighty-phoenix/my-jaeger updated logger.go:42: 07:35:55 | set-custom-img/2-install | test step completed 2-install logger.go:42: 07:35:55 | set-custom-img/3-check-image | starting test step 3-check-image logger.go:42: 07:35:55 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh] logger.go:42: 07:35:55 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1 logger.go:42: 07:36:00 | set-custom-img/3-check-image | Collector image asserted properly! 
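
check-collector-img.sh is ignored by kuttl (it fails the step-file regexp, as logged above) and is instead invoked by the 3-check-image step, where it polls until the collector Deployment reports the custom image set in 02-install.yaml. The script itself is not shown in the log; a minimal sketch of such a check, with assumed resource and variable names:

    EXPECTED=test
    while true; do
      HAS=$(kubectl get deployment my-jaeger-collector -n "$NAMESPACE" \
            -o jsonpath='{.spec.template.spec.containers[0].image}')
      [ "$HAS" = "$EXPECTED" ] && { echo "Collector image asserted properly!"; break; }
      echo "Collector image mismatch. Expected: $EXPECTED. Has: $HAS"
      sleep 5
    done

Note that pulling the image "test" is expected to fail (the events below show ErrImagePull and ImagePullBackOff); the assertion only cares that the Deployment spec was updated, not that the new pod becomes ready.
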
logger.go:42: 07:36:00 | set-custom-img/3-check-image | test step completed 3-check-image logger.go:42: 07:36:00 | set-custom-img | set-custom-img events from ns kuttl-test-mighty-phoenix: logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j Binding Scheduled Successfully assigned kuttl-test-mighty-phoenix/elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:23 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f8487c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j replicaset-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:23 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f8487c from 0 to 1 deployment-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes multus logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:35 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:41 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmightyphoenixmyjaeger-1-55c5f844hp5j.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:52 +0000 UTC Normal Pod my-jaeger-collector-66dbf44c9-ss4mj 
Binding Scheduled Successfully assigned kuttl-test-mighty-phoenix/my-jaeger-collector-66dbf44c9-ss4mj to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:52 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-66dbf44c9 SuccessfulCreate Created pod: my-jaeger-collector-66dbf44c9-ss4mj replicaset-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:52 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-66dbf44c9 from 0 to 1 deployment-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:52 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5 Binding Scheduled Successfully assigned kuttl-test-mighty-phoenix/my-jaeger-query-58bb976c68-ttjj5 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:52 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-58bb976c68 SuccessfulCreate Created pod: my-jaeger-query-58bb976c68-ttjj5 replicaset-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:52 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-58bb976c68 from 0 to 1 deployment-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-collector-66dbf44c9-ss4mj AddedInterface Add eth0 [10.129.2.69/23] from ovn-kubernetes multus logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-collector-66dbf44c9-ss4mj.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-collector-66dbf44c9-ss4mj.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-collector-66dbf44c9-ss4mj.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5 AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes multus logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod 
my-jaeger-query-58bb976c68-ttjj5.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:53 +0000 UTC Normal Pod my-jaeger-query-58bb976c68-ttjj5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:56 +0000 UTC Normal Pod my-jaeger-collector-66dbf44c9-ss4mj.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:56 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-66dbf44c9 SuccessfulDelete Deleted pod: my-jaeger-collector-66dbf44c9-ss4mj replicaset-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:56 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-66dbf44c9 from 1 to 0 deployment-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:57 +0000 UTC Normal Pod my-jaeger-collector-785fd4785d-gnl7d Binding Scheduled Successfully assigned kuttl-test-mighty-phoenix/my-jaeger-collector-785fd4785d-gnl7d to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:57 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-785fd4785d SuccessfulCreate Created pod: my-jaeger-collector-785fd4785d-gnl7d replicaset-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:57 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-785fd4785d from 0 to 1 deployment-controller logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:58 +0000 UTC Normal Pod my-jaeger-collector-785fd4785d-gnl7d AddedInterface Add eth0 [10.129.2.70/23] from ovn-kubernetes multus logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:58 +0000 UTC Normal Pod my-jaeger-collector-785fd4785d-gnl7d.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:58 +0000 UTC Warning Pod my-jaeger-collector-785fd4785d-gnl7d.spec.containers{jaeger-collector} Failed Failed to pull image "test": initializing source docker://test:latest: reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:58 +0000 UTC Warning Pod my-jaeger-collector-785fd4785d-gnl7d.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:58 +0000 UTC Normal Pod my-jaeger-collector-785fd4785d-gnl7d.spec.containers{jaeger-collector} BackOff Back-off pulling image "test" kubelet logger.go:42: 07:36:00 | set-custom-img | 2025-03-10 07:35:58 +0000 UTC Warning 
Pod my-jaeger-collector-785fd4785d-gnl7d.spec.containers{jaeger-collector} Failed Error: ImagePullBackOff kubelet logger.go:42: 07:36:00 | set-custom-img | Deleting namespace: kuttl-test-mighty-phoenix === CONT kuttl/harness/collector-otlp-production-http logger.go:42: 07:36:06 | collector-otlp-production-http | Creating namespace: kuttl-test-logical-firefly logger.go:42: 07:36:07 | collector-otlp-production-http/1-install | starting test step 1-install logger.go:42: 07:36:07 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-logical-firefly/my-jaeger created logger.go:42: 07:36:47 | collector-otlp-production-http/1-install | test step completed 1-install logger.go:42: 07:36:47 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:36:47 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:36:49 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:36:55 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:36:55 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:36:56 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created logger.go:42: 07:36:56 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created logger.go:42: 07:37:08 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:37:08 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-logical-firefly: logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9774c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn replicaset-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn Binding Scheduled Successfully assigned kuttl-test-logical-firefly/elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn AddedInterface Add eth0 [10.131.0.40/23] from ovn-kubernetes multus logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn.spec.containers{elasticsearch} Pulled Container image 
"registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:14 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9774c from 0 to 1 deployment-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:27 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestlogicalfireflymyjaeger-1-6fd7f9zd8jn.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:44 +0000 UTC Normal Pod my-jaeger-collector-5b6cfcd7cb-gvmvh Binding Scheduled Successfully assigned kuttl-test-logical-firefly/my-jaeger-collector-5b6cfcd7cb-gvmvh to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:44 +0000 UTC Warning Pod my-jaeger-collector-5b6cfcd7cb-gvmvh FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:44 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5b6cfcd7cb SuccessfulCreate Created pod: my-jaeger-collector-5b6cfcd7cb-gvmvh replicaset-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:44 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5b6cfcd7cb from 0 to 1 deployment-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:44 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977 Binding Scheduled 
Successfully assigned kuttl-test-logical-firefly/my-jaeger-query-6cd6dbcdf9-95977 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:44 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6cd6dbcdf9 SuccessfulCreate Created pod: my-jaeger-query-6cd6dbcdf9-95977 replicaset-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:44 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6cd6dbcdf9 from 0 to 1 deployment-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-collector-5b6cfcd7cb-gvmvh AddedInterface Add eth0 [10.129.2.71/23] from ovn-kubernetes multus logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-collector-5b6cfcd7cb-gvmvh.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-collector-5b6cfcd7cb-gvmvh.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-collector-5b6cfcd7cb-gvmvh.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977 AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes multus logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{jaeger-agent} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:45 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:50 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:50 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:50 +0000 UTC Normal Pod my-jaeger-query-6cd6dbcdf9-95977.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:50 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6cd6dbcdf9 SuccessfulDelete Deleted pod: my-jaeger-query-6cd6dbcdf9-95977 replicaset-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:50 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-6cd6dbcdf9 from 1 to 0 deployment-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:51 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v Binding Scheduled Successfully assigned kuttl-test-logical-firefly/my-jaeger-query-5c645856c4-sbx9v to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:51 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v AddedInterface Add eth0 [10.128.2.55/23] from ovn-kubernetes multus logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:51 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:51 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:51 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:51 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:51 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5c645856c4 SuccessfulCreate Created pod: my-jaeger-query-5c645856c4-sbx9v replicaset-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:51 +0000 
UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5c645856c4 from 0 to 1 deployment-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:52 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:52 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:52 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:52 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:52 +0000 UTC Normal Pod my-jaeger-query-5c645856c4-sbx9v.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:56 +0000 UTC Normal Pod check-span-zh45s Binding Scheduled Successfully assigned kuttl-test-logical-firefly/check-span-zh45s to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:56 +0000 UTC Normal Pod check-span-zh45s AddedInterface Add eth0 [10.129.2.73/23] from ovn-kubernetes multus logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:56 +0000 UTC Normal Pod check-span-zh45s.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:56 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-zh45s job-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:56 +0000 UTC Normal Pod report-span-pbtwc Binding Scheduled Successfully assigned kuttl-test-logical-firefly/report-span-pbtwc to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:56 +0000 UTC Normal Pod report-span-pbtwc AddedInterface Add eth0 [10.129.2.72/23] from ovn-kubernetes multus logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:56 +0000 UTC Normal Pod report-span-pbtwc.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:56 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-pbtwc job-controller logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:57 +0000 UTC Normal Pod check-span-zh45s.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 580ms (580ms including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:57 +0000 UTC Normal Pod check-span-zh45s.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:57 +0000 UTC Normal Pod check-span-zh45s.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:57 +0000 UTC Normal Pod report-span-pbtwc.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 696ms (696ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:57 +0000 UTC Normal Pod report-span-pbtwc.spec.containers{report-span} Created Created container: report-span kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:57 +0000 UTC Normal Pod report-span-pbtwc.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:59 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:59 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:36:59 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:37:08 | collector-otlp-production-http | 2025-03-10 07:37:07 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:37:08 | collector-otlp-production-http | Deleting namespace: kuttl-test-logical-firefly === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 07:37:20 | collector-otlp-production-grpc | Creating namespace: kuttl-test-neat-goose logger.go:42: 07:37:20 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 07:37:20 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-neat-goose/my-jaeger created logger.go:42: 07:37:54 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 07:37:54 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:37:54 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:37:56 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. 
The missing annotation will be patched automatically. logger.go:42: 07:38:04 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:38:04 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:38:05 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 07:38:05 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 07:38:25 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:38:25 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-neat-goose: logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw Binding Scheduled Successfully assigned kuttl-test-neat-goose/elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:26 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw replicaset-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:26 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestneatgoosemyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5 from 0 to 1 deployment-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes multus logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 
2025-03-10 07:37:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:40 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:45 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestneatgoosemyjaeger-1-56c7dfffd5-wlblw.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:51 +0000 UTC Normal Pod my-jaeger-collector-6f76dd8589-l7mv2 Binding Scheduled Successfully assigned kuttl-test-neat-goose/my-jaeger-collector-6f76dd8589-l7mv2 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:51 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-6f76dd8589 SuccessfulCreate Created pod: my-jaeger-collector-6f76dd8589-l7mv2 replicaset-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:51 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-6f76dd8589 from 0 to 1 deployment-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:51 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd Binding Scheduled Successfully assigned kuttl-test-neat-goose/my-jaeger-query-7774c7df75-wpmgd to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:51 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7774c7df75 SuccessfulCreate Created pod: my-jaeger-query-7774c7df75-wpmgd replicaset-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:51 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7774c7df75 from 0 to 1 deployment-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-collector-6f76dd8589-l7mv2 AddedInterface Add eth0 [10.129.2.74/23] from ovn-kubernetes multus logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-collector-6f76dd8589-l7mv2.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-collector-6f76dd8589-l7mv2.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-collector-6f76dd8589-l7mv2.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod 
my-jaeger-query-7774c7df75-wpmgd AddedInterface Add eth0 [10.128.2.56/23] from ovn-kubernetes multus logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:37:52 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:01 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:01 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:01 +0000 UTC Normal Pod my-jaeger-query-7774c7df75-wpmgd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:01 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7774c7df75 SuccessfulDelete Deleted pod: my-jaeger-query-7774c7df75-wpmgd replicaset-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:01 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-7774c7df75 from 1 to 0 deployment-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:02 +0000 UTC Normal Pod 
my-jaeger-query-5898664f59-jmtt6 Binding Scheduled Successfully assigned kuttl-test-neat-goose/my-jaeger-query-5898664f59-jmtt6 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:02 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5898664f59 SuccessfulCreate Created pod: my-jaeger-query-5898664f59-jmtt6 replicaset-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:02 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5898664f59 from 0 to 1 deployment-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6 AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes multus logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:03 +0000 UTC Normal Pod my-jaeger-query-5898664f59-jmtt6.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:05 +0000 UTC Normal Pod check-span-lctdg Binding Scheduled Successfully assigned kuttl-test-neat-goose/check-span-lctdg to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:05 +0000 UTC Normal Pod check-span-lctdg AddedInterface Add eth0 [10.129.2.76/23] from ovn-kubernetes multus 
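(Editor's note, not part of the captured log.) Each OTLP smoke test above follows the same pattern: gomplate renders otlp-smoke-test-job.yaml from tests/templates/otlp-smoke-test.yaml.template using the exported variables, kubectl creates a report-span Job that pushes spans to the collector endpoint and a check-span Job that queries them back, and the step completes once check-span reports Completed. A minimal sketch of checking that Job pair by hand, assuming the namespace from this run; the wait/logs invocations are standard kubectl, not taken from the log:

  NAMESPACE=kuttl-test-neat-goose   # namespace created for this test, as shown above
  kubectl create -f otlp-smoke-test-job.yaml -n "$NAMESPACE"   # creates job.batch/report-span and job.batch/check-span
  kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=120s
  kubectl logs job/check-span -n "$NAMESPACE"   # assertion output from the asserts container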
logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:05 +0000 UTC Normal Pod check-span-lctdg.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:05 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-lctdg job-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:05 +0000 UTC Normal Pod report-span-kc65h Binding Scheduled Successfully assigned kuttl-test-neat-goose/report-span-kc65h to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:05 +0000 UTC Normal Pod report-span-kc65h AddedInterface Add eth0 [10.129.2.75/23] from ovn-kubernetes multus logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:05 +0000 UTC Normal Pod report-span-kc65h.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:05 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-kc65h job-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:06 +0000 UTC Normal Pod check-span-lctdg.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 737ms (737ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:06 +0000 UTC Normal Pod check-span-lctdg.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:06 +0000 UTC Normal Pod check-span-lctdg.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:06 +0000 UTC Normal Pod report-span-kc65h.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 839ms (839ms including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:06 +0000 UTC Normal Pod report-span-kc65h.spec.containers{report-span} Created Created container: report-span kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:06 +0000 UTC Normal Pod report-span-kc65h.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:38:25 | collector-otlp-production-grpc | 2025-03-10 07:38:25 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:38:25 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-neat-goose === CONT kuttl/harness/collector-autoscale logger.go:42: 07:38:37 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:38:37 | collector-autoscale | Creating namespace: kuttl-test-needed-terrapin logger.go:42: 07:38:37 | collector-autoscale/1-install | starting test step 1-install logger.go:42: 07:38:37 | collector-autoscale/1-install | Jaeger:kuttl-test-needed-terrapin/simple-prod created logger.go:42: 07:39:14 | collector-autoscale/1-install | test step completed 1-install logger.go:42: 07:39:14 | collector-autoscale/2- | starting test step 2- logger.go:42: 07:39:14 | collector-autoscale/2- | test step completed 2- logger.go:42: 07:39:14 | collector-autoscale | collector-autoscale events from ns kuttl-test-needed-terrapin: logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:43 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554b77c75c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq replicaset-controller logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq Binding Scheduled Successfully assigned kuttl-test-needed-terrapin/elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:43 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554b77c75c from 0 to 1 deployment-controller logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:44 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes multus logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:38:56 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:01 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestneededterrapinsimpleprod-1-554bjkrnq.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:12 +0000 UTC Normal Pod simple-prod-collector-59dfd99f56-l7rl4 Binding Scheduled Successfully assigned kuttl-test-needed-terrapin/simple-prod-collector-59dfd99f56-l7rl4 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:12 +0000 UTC Warning Pod simple-prod-collector-59dfd99f56-l7rl4 FailedMount MountVolume.SetUp failed for volume "simple-prod-collector-tls-config-volume" : secret "simple-prod-collector-headless-tls" not found kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:12 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-59dfd99f56 SuccessfulCreate Created pod: simple-prod-collector-59dfd99f56-l7rl4 replicaset-controller logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:12 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-59dfd99f56 from 0 to 1 deployment-controller logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:12 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84 Binding Scheduled Successfully assigned 
kuttl-test-needed-terrapin/simple-prod-query-b459d95cc-cgh84 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:12 +0000 UTC Normal ReplicaSet.apps simple-prod-query-b459d95cc SuccessfulCreate Created pod: simple-prod-query-b459d95cc-cgh84 replicaset-controller logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:12 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-b459d95cc from 0 to 1 deployment-controller logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-collector-59dfd99f56-l7rl4 AddedInterface Add eth0 [10.129.2.77/23] from ovn-kubernetes multus logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-collector-59dfd99f56-l7rl4.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-collector-59dfd99f56-l7rl4.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-collector-59dfd99f56-l7rl4.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84 AddedInterface Add eth0 [10.128.2.58/23] from ovn-kubernetes multus logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 
07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:39:14 | collector-autoscale | 2025-03-10 07:39:13 +0000 UTC Normal Pod simple-prod-query-b459d95cc-cgh84.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:39:14 | collector-autoscale | Deleting namespace: kuttl-test-needed-terrapin === CONT kuttl/harness/collector-otlp-allinone-grpc logger.go:42: 07:39:21 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-talented-catfish logger.go:42: 07:39:21 | collector-otlp-allinone-grpc/0-install | starting test step 0-install logger.go:42: 07:39:21 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-talented-catfish/my-jaeger created logger.go:42: 07:39:27 | collector-otlp-allinone-grpc/0-install | test step completed 0-install logger.go:42: 07:39:27 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:39:27 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:39:28 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:39:35 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:39:36 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:39:36 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created logger.go:42: 07:39:36 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created logger.go:42: 07:39:57 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-talented-catfish: logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:25 +0000 UTC Normal Pod my-jaeger-cf7d485cc-mfbq8 Binding Scheduled Successfully assigned kuttl-test-talented-catfish/my-jaeger-cf7d485cc-mfbq8 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:25 +0000 UTC Normal Pod my-jaeger-cf7d485cc-mfbq8 AddedInterface Add eth0 [10.128.2.59/23] from ovn-kubernetes multus logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:25 +0000 UTC Normal Pod my-jaeger-cf7d485cc-mfbq8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:25 +0000 UTC Normal Pod 
my-jaeger-cf7d485cc-mfbq8.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:25 +0000 UTC Normal Pod my-jaeger-cf7d485cc-mfbq8.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:25 +0000 UTC Normal Pod my-jaeger-cf7d485cc-mfbq8.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-cf7d485cc SuccessfulCreate Created pod: my-jaeger-cf7d485cc-mfbq8 replicaset-controller logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:25 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-cf7d485cc from 0 to 1 deployment-controller logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:26 +0000 UTC Normal Pod my-jaeger-cf7d485cc-mfbq8.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:26 +0000 UTC Normal Pod my-jaeger-cf7d485cc-mfbq8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:33 +0000 UTC Normal Pod my-jaeger-7cbf4f44fd-nz426 Binding Scheduled Successfully assigned kuttl-test-talented-catfish/my-jaeger-7cbf4f44fd-nz426 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:33 +0000 UTC Normal ReplicaSet.apps my-jaeger-7cbf4f44fd SuccessfulCreate Created pod: my-jaeger-7cbf4f44fd-nz426 replicaset-controller logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:33 +0000 UTC Normal Pod my-jaeger-cf7d485cc-mfbq8.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:33 +0000 UTC Normal Pod my-jaeger-cf7d485cc-mfbq8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:33 +0000 UTC Normal ReplicaSet.apps my-jaeger-cf7d485cc SuccessfulDelete Deleted pod: my-jaeger-cf7d485cc-mfbq8 replicaset-controller logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:33 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-cf7d485cc from 1 to 0 deployment-controller logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:33 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7cbf4f44fd from 0 to 1 deployment-controller logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:34 +0000 UTC Normal Pod my-jaeger-7cbf4f44fd-nz426 AddedInterface Add eth0 [10.128.2.60/23] from ovn-kubernetes multus logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:34 +0000 UTC Normal Pod my-jaeger-7cbf4f44fd-nz426.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:34 +0000 UTC 
Normal Pod my-jaeger-7cbf4f44fd-nz426.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:34 +0000 UTC Normal Pod my-jaeger-7cbf4f44fd-nz426.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:34 +0000 UTC Normal Pod my-jaeger-7cbf4f44fd-nz426.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:34 +0000 UTC Normal Pod my-jaeger-7cbf4f44fd-nz426.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:34 +0000 UTC Normal Pod my-jaeger-7cbf4f44fd-nz426.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:36 +0000 UTC Normal Pod check-span-ms4p7 Binding Scheduled Successfully assigned kuttl-test-talented-catfish/check-span-ms4p7 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:36 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-ms4p7 job-controller logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:36 +0000 UTC Normal Pod report-span-gz8xp Binding Scheduled Successfully assigned kuttl-test-talented-catfish/report-span-gz8xp to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:36 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-gz8xp job-controller logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:37 +0000 UTC Normal Pod check-span-ms4p7 AddedInterface Add eth0 [10.129.2.78/23] from ovn-kubernetes multus logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:37 +0000 UTC Normal Pod check-span-ms4p7.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:37 +0000 UTC Normal Pod report-span-gz8xp AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes multus logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:37 +0000 UTC Normal Pod report-span-gz8xp.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:37 +0000 UTC Normal Pod report-span-gz8xp.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 788ms (788ms including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:37 +0000 UTC Normal Pod report-span-gz8xp.spec.containers{report-span} Created Created container: report-span kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:37 +0000 UTC Normal Pod report-span-gz8xp.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:38 +0000 UTC Normal Pod check-span-ms4p7.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.253s (1.253s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:38 +0000 UTC Normal Pod check-span-ms4p7.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:38 +0000 UTC Normal Pod check-span-ms4p7.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | 2025-03-10 07:39:57 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:39:57 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-talented-catfish === CONT kuttl/harness/cassandra-spark logger.go:42: 07:40:10 | cassandra-spark | Ignoring 01-assert.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:40:10 | cassandra-spark | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:40:10 | cassandra-spark | Creating namespace: kuttl-test-busy-swan logger.go:42: 07:40:10 | cassandra-spark | cassandra-spark events from ns kuttl-test-busy-swan: logger.go:42: 07:40:10 | cassandra-spark | Deleting namespace: kuttl-test-busy-swan === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (344.19s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.00s) --- PASS: kuttl/harness/collector-otlp-allinone-http (38.98s) --- PASS: kuttl/harness/set-custom-img (50.04s) --- PASS: kuttl/harness/collector-otlp-production-http (73.75s) --- PASS: kuttl/harness/collector-otlp-production-grpc (77.01s) --- PASS: kuttl/harness/collector-autoscale (43.39s) --- PASS: kuttl/harness/collector-otlp-allinone-grpc (49.03s) --- PASS: kuttl/harness/cassandra-spark (5.83s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml time="2025-03-10T07:40:16Z" level=debug msg="Setting a new name for the test suites" time="2025-03-10T07:40:16Z" level=debug msg="Removing 'artifacts' TestCase" time="2025-03-10T07:40:16Z" level=debug msg="normalizing test case names" time="2025-03-10T07:40:16Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts" time="2025-03-10T07:40:16Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http" time="2025-03-10T07:40:16Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img" time="2025-03-10T07:40:16Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http" time="2025-03-10T07:40:16Z" level=debug 
msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc" time="2025-03-10T07:40:16Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale" time="2025-03-10T07:40:16Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc" time="2025-03-10T07:40:16Z" level=debug msg="miscellaneous/cassandra-spark -> miscellaneous_cassandra_spark" +----------------------------------------------+--------+ | NAME | RESULT | +----------------------------------------------+--------+ | miscellaneous_artifacts | passed | | miscellaneous_collector_otlp_allinone_http | passed | | miscellaneous_set_custom_img | passed | | miscellaneous_collector_otlp_production_http | passed | | miscellaneous_collector_otlp_production_grpc | passed | | miscellaneous_collector_autoscale | passed | | miscellaneous_collector_otlp_allinone_grpc | passed | | miscellaneous_cassandra_spark | passed | +----------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true + '[' 3 -ne 3 ']' + test_suite_name=sidecar + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/sidecar.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-sidecar make[2]: Entering directory '/tmp/jaeger-tests' ./tests/e2e/sidecar/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 43m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 43m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/sidecar/render.sh ++ export SUITE_DIR=./tests/e2e/sidecar ++ SUITE_DIR=./tests/e2e/sidecar ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/sidecar ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + jaeger_service_name=order + start_test sidecar-deployment + '[' 1 -ne 1 ']' + test_name=sidecar-deployment + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-deployment' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m' Rendering files for test sidecar-deployment + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build + '[' _build '!=' _build ']' + mkdir -p sidecar-deployment + cd sidecar-deployment + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-namespace + '[' 1 -ne 1 ']' + test_name=sidecar-namespace + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-namespace' + '[' 1 -ne 1 ']' + 
echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m' Rendering files for test sidecar-namespace + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment + '[' sidecar-deployment '!=' _build ']' + cd .. + mkdir -p sidecar-namespace + cd sidecar-namespace + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-skip-webhook + '[' 1 -ne 1 ']' + test_name=sidecar-skip-webhook + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-skip-webhook' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m' Rendering files for test sidecar-skip-webhook + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace + '[' sidecar-namespace '!=' _build ']' + cd .. 
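The render_find_service calls traced above follow a fixed pattern: export the template inputs, render one kuttl step file and one assert file with gomplate, then clean up the environment. A minimal sketch of that pattern, reconstructed from this trace (the helper's actual definition in the jaeger-tests repo may differ):

    # Sketch reconstructed from the shell trace; names and paths as they appear above.
    render_find_service() {
        jaeger=$1; deployment_strategy=$2; service_name=$3; job_number=$4; test_step=$5
        export JAEGER_NAME="$jaeger" JOB_NUMBER="$job_number" SERVICE_NAME="$service_name"
        # For the allInOne strategy the query endpoint is the <jaeger>-query service on 16686.
        export JAEGER_QUERY_ENDPOINT="http://${jaeger}-query:16686"
        /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o "./${test_step}-find-service.yaml"
        /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o "./${test_step}-assert.yaml"
        unset JAEGER_NAME SERVICE_NAME JOB_NUMBER JAEGER_COLLECTOR_ENDPOINT
    }
    # Usage matching the trace: render_find_service agent-as-sidecar allInOne order 00 03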
+ mkdir -p sidecar-skip-webhook + cd sidecar-skip-webhook + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running sidecar E2E tests' Running sidecar E2E tests + cd tests/e2e/sidecar/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3414875983 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/sidecar-deployment === PAUSE kuttl/harness/sidecar-deployment === RUN kuttl/harness/sidecar-namespace === PAUSE kuttl/harness/sidecar-namespace === RUN kuttl/harness/sidecar-skip-webhook === PAUSE kuttl/harness/sidecar-skip-webhook === CONT kuttl/harness/artifacts logger.go:42: 07:40:24 | artifacts | Creating namespace: kuttl-test-flowing-chow logger.go:42: 07:40:24 | artifacts | artifacts events from ns kuttl-test-flowing-chow: logger.go:42: 07:40:24 | artifacts | Deleting namespace: kuttl-test-flowing-chow === CONT kuttl/harness/sidecar-namespace logger.go:42: 07:40:30 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:40:30 | sidecar-namespace | Creating namespace: kuttl-test-wondrous-jaybird logger.go:42: 07:40:30 | sidecar-namespace/0-install | starting test step 0-install logger.go:42: 07:40:30 | sidecar-namespace/0-install | Jaeger:kuttl-test-wondrous-jaybird/agent-as-sidecar created logger.go:42: 07:40:36 | sidecar-namespace/0-install | test step completed 0-install logger.go:42: 07:40:36 | sidecar-namespace/1-install | starting test step 1-install logger.go:42: 07:40:36 | sidecar-namespace/1-install | Deployment:kuttl-test-wondrous-jaybird/vertx-create-span-sidecar created logger.go:42: 07:40:38 | sidecar-namespace/1-install | test step completed 1-install logger.go:42: 07:40:38 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection logger.go:42: 07:40:38 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"] logger.go:42: 07:40:38 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-wondrous-jaybird annotated logger.go:42: 07:40:43 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection logger.go:42: 07:40:43 | sidecar-namespace/3-find-service | starting test step 3-find-service logger.go:42: 07:40:43 | sidecar-namespace/3-find-service | Job:kuttl-test-wondrous-jaybird/00-find-service created logger.go:42: 07:40:56 | sidecar-namespace/3-find-service | test step completed 3-find-service logger.go:42: 07:40:56 | sidecar-namespace/4-other-instance | starting test step 4-other-instance logger.go:42: 07:40:56 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-wondrous-jaybird/agent-as-sidecar2 created logger.go:42: 07:41:06 | sidecar-namespace/4-other-instance | test step 
completed 4-other-instance logger.go:42: 07:41:06 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 07:41:07 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 07:41:07 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 07:41:07 | sidecar-namespace/6-find-service | Job:kuttl-test-wondrous-jaybird/01-find-service created logger.go:42: 07:41:20 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 07:41:20 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 07:41:20 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 07:41:20 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-wondrous-jaybird annotated logger.go:42: 07:41:21 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 07:41:21 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-wondrous-jaybird: logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:34 +0000 UTC Normal Pod agent-as-sidecar-65d5b885c-qp6cp Binding Scheduled Successfully assigned kuttl-test-wondrous-jaybird/agent-as-sidecar-65d5b885c-qp6cp to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:34 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-65d5b885c SuccessfulCreate Created pod: agent-as-sidecar-65d5b885c-qp6cp replicaset-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:34 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-65d5b885c from 0 to 1 deployment-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:35 +0000 UTC Normal Pod agent-as-sidecar-65d5b885c-qp6cp AddedInterface Add eth0 [10.129.2.79/23] from ovn-kubernetes multus logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:35 +0000 UTC Normal Pod agent-as-sidecar-65d5b885c-qp6cp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:35 +0000 UTC Normal Pod agent-as-sidecar-65d5b885c-qp6cp.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:35 +0000 UTC Normal Pod agent-as-sidecar-65d5b885c-qp6cp.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:36 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-7s6fx Binding Scheduled Successfully assigned kuttl-test-wondrous-jaybird/vertx-create-span-sidecar-cdb84d44f-7s6fx to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:36 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-cdb84d44f SuccessfulCreate Created pod: vertx-create-span-sidecar-cdb84d44f-7s6fx replicaset-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:36 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-cdb84d44f from 0 to 1 deployment-controller logger.go:42: 07:41:21 | 
sidecar-namespace | 2025-03-10 07:40:37 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-7s6fx AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes multus logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:37 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:37 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:37 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:38 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48 Binding Scheduled Successfully assigned kuttl-test-wondrous-jaybird/vertx-create-span-sidecar-7cf5d9cd6b-b7j48 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:38 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48 AddedInterface Add eth0 [10.128.2.61/23] from ovn-kubernetes multus logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:38 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:38 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7cf5d9cd6b SuccessfulCreate Created pod: vertx-create-span-sidecar-7cf5d9cd6b-b7j48 replicaset-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:38 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7cf5d9cd6b from 0 to 1 deployment-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:42 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.245s (3.245s including waiting). Image size: 282912835 bytes. 
kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:42 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:42 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:42 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:42 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:42 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:43 +0000 UTC Normal Pod 00-find-service-frxvd Binding Scheduled Successfully assigned kuttl-test-wondrous-jaybird/00-find-service-frxvd to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:43 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-frxvd job-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:44 +0000 UTC Normal Pod 00-find-service-frxvd AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes multus logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:44 +0000 UTC Normal Pod 00-find-service-frxvd.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:44 +0000 UTC Normal Pod 00-find-service-frxvd.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 671ms (671ms including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:44 +0000 UTC Normal Pod 00-find-service-frxvd.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:44 +0000 UTC Normal Pod 00-find-service-frxvd.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:45 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.44:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:45 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.44:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:47 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:47 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.44:8080/": read tcp 10.131.0.2:50386->10.131.0.44:8080: read: connection reset by peer kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:47 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.44:8080/": dial tcp 10.131.0.44:8080: connect: connection refused kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:50 +0000 UTC Warning Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.61:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:50 +0000 UTC Warning Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.61:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:52 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:53 +0000 UTC Warning Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.61:8080/": read tcp 10.128.2.2:43414->10.128.2.61:8080: read: connection reset by peer kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:53 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:55 +0000 UTC Normal Job.batch 00-find-service 
Completed Job completed job-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:40:57 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.44:8080/": read tcp 10.131.0.2:58100->10.131.0.44:8080: read: connection reset by peer kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:03 +0000 UTC Normal Pod agent-as-sidecar2-777ffb474c-dr8km Binding Scheduled Successfully assigned kuttl-test-wondrous-jaybird/agent-as-sidecar2-777ffb474c-dr8km to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:03 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-777ffb474c SuccessfulCreate Created pod: agent-as-sidecar2-777ffb474c-dr8km replicaset-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:03 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-777ffb474c from 0 to 1 deployment-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:04 +0000 UTC Normal Pod agent-as-sidecar2-777ffb474c-dr8km AddedInterface Add eth0 [10.129.2.80/23] from ovn-kubernetes multus logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:04 +0000 UTC Normal Pod agent-as-sidecar2-777ffb474c-dr8km.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:04 +0000 UTC Normal Pod agent-as-sidecar2-777ffb474c-dr8km.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:04 +0000 UTC Normal Pod agent-as-sidecar2-777ffb474c-dr8km.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:04 +0000 UTC Warning Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.61:8080/": dial tcp 10.128.2.61:8080: connect: connection refused kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:06 +0000 UTC Normal Pod agent-as-sidecar-65d5b885c-qp6cp.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:06 +0000 UTC Warning Pod agent-as-sidecar-65d5b885c-qp6cp.spec.containers{jaeger} Unhealthy Readiness probe failed: Get "http://10.129.2.79:14269/": dial tcp 10.129.2.79:14269: connect: connection refused kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:06 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Killing Stopping container vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:06 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-cdb84d44f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-cdb84d44f-7s6fx replicaset-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:06 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-cdb84d44f from 1 to 0 deployment-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:06 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar 
ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-64df5c9556 from 0 to 1 deployment-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Pod 01-find-service-4ksxf Binding Scheduled Successfully assigned kuttl-test-wondrous-jaybird/01-find-service-4ksxf to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Pod 01-find-service-4ksxf AddedInterface Add eth0 [10.129.2.81/23] from ovn-kubernetes multus logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Pod 01-find-service-4ksxf.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-4ksxf job-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh Binding Scheduled Successfully assigned kuttl-test-wondrous-jaybird/vertx-create-span-sidecar-64df5c9556-pbkbh to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes multus logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-64df5c9556 SuccessfulCreate Created pod: vertx-create-span-sidecar-64df5c9556-pbkbh replicaset-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.44:8080/": read tcp 10.131.0.2:38090->10.131.0.44:8080: read: connection reset by peer kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:07 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-7s6fx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.44:8080/": read tcp 10.131.0.2:38102->10.131.0.44:8080: read: connection reset by peer kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:08 +0000 UTC Normal Pod 01-find-service-4ksxf.spec.containers{asserts-container} Pulled Successfully pulled image 
"quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 557ms (557ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:08 +0000 UTC Normal Pod 01-find-service-4ksxf.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:08 +0000 UTC Normal Pod 01-find-service-4ksxf.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:11 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" in 3.651s (3.651s including waiting). Image size: 112614125 bytes. kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:11 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:11 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:15 +0000 UTC Warning Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.46:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:15 +0000 UTC Warning Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.46:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:17 +0000 UTC Normal Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:17 +0000 UTC Warning Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.46:8080/": read tcp 10.131.0.2:47690->10.131.0.46:8080: read: connection reset by peer kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:17 +0000 UTC Warning Pod vertx-create-span-sidecar-64df5c9556-pbkbh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.46:8080/": dial tcp 10.131.0.46:8080: connect: connection refused kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:19 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7cf5d9cd6b-b7j48.spec.containers{vertx-create-span-sidecar} Killing Stopping container vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:20 +0000 UTC Normal ReplicaSet.apps 
vertx-create-span-sidecar-7cf5d9cd6b SuccessfulDelete Deleted pod: vertx-create-span-sidecar-7cf5d9cd6b-b7j48 replicaset-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:20 +0000 UTC Normal Pod vertx-create-span-sidecar-86d8464867-6jdzc Binding Scheduled Successfully assigned kuttl-test-wondrous-jaybird/vertx-create-span-sidecar-86d8464867-6jdzc to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:20 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-86d8464867 SuccessfulCreate Created pod: vertx-create-span-sidecar-86d8464867-6jdzc replicaset-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:20 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-7cf5d9cd6b from 1 to 0 deployment-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:20 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-86d8464867 from 0 to 1 deployment-controller logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:21 +0000 UTC Normal Pod vertx-create-span-sidecar-86d8464867-6jdzc AddedInterface Add eth0 [10.129.2.82/23] from ovn-kubernetes multus logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:21 +0000 UTC Normal Pod vertx-create-span-sidecar-86d8464867-6jdzc.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:21 +0000 UTC Normal Pod vertx-create-span-sidecar-86d8464867-6jdzc.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | 2025-03-10 07:41:21 +0000 UTC Normal Pod vertx-create-span-sidecar-86d8464867-6jdzc.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:41:21 | sidecar-namespace | Deleting namespace: kuttl-test-wondrous-jaybird === CONT kuttl/harness/sidecar-skip-webhook logger.go:42: 07:41:28 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:41:28 | sidecar-skip-webhook | Creating namespace: kuttl-test-resolved-adder logger.go:42: 07:41:28 | sidecar-skip-webhook/0-install | starting test step 0-install logger.go:42: 07:41:28 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-resolved-adder/agent-as-sidecar created logger.go:42: 07:41:34 | sidecar-skip-webhook/0-install | test step completed 0-install logger.go:42: 07:41:34 | sidecar-skip-webhook/1-install | starting test step 1-install logger.go:42: 07:41:34 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-resolved-adder/vertx-create-span-sidecar created logger.go:42: 07:41:36 | sidecar-skip-webhook/1-install | test step completed 1-install logger.go:42: 07:41:36 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label logger.go:42: 07:41:36 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-resolved-adder] logger.go:42: 07:41:37 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled logger.go:42: 07:41:37 | 
sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-resolved-adder] logger.go:42: 07:41:37 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 07:41:37 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label logger.go:42: 07:41:37 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label logger.go:42: 07:41:37 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-resolved-adder] logger.go:42: 07:41:37 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled logger.go:42: 07:41:38 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label logger.go:42: 07:41:38 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-resolved-adder: logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:32 +0000 UTC Normal Pod agent-as-sidecar-845785d885-jklq2 Binding Scheduled Successfully assigned kuttl-test-resolved-adder/agent-as-sidecar-845785d885-jklq2 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:32 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-845785d885 SuccessfulCreate Created pod: agent-as-sidecar-845785d885-jklq2 replicaset-controller logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:32 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-845785d885 from 0 to 1 deployment-controller logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:33 +0000 UTC Normal Pod agent-as-sidecar-845785d885-jklq2 AddedInterface Add eth0 [10.129.2.83/23] from ovn-kubernetes multus logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:33 +0000 UTC Normal Pod agent-as-sidecar-845785d885-jklq2.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:33 +0000 UTC Normal Pod agent-as-sidecar-845785d885-jklq2.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:33 +0000 UTC Normal Pod agent-as-sidecar-845785d885-jklq2.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:34 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-mqczd Binding Scheduled Successfully assigned kuttl-test-resolved-adder/vertx-create-span-sidecar-cdb84d44f-mqczd to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:34 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-cdb84d44f SuccessfulCreate Created pod: vertx-create-span-sidecar-cdb84d44f-mqczd replicaset-controller logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:34 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-cdb84d44f from 0 to 1 deployment-controller logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:35 +0000 UTC Normal Pod 
vertx-create-span-sidecar-cdb84d44f-mqczd AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes multus logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:35 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-mqczd.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:35 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-mqczd.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:35 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-mqczd.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal Pod vertx-create-span-sidecar-b5d9c7f74-qrcn6 Binding Scheduled Successfully assigned kuttl-test-resolved-adder/vertx-create-span-sidecar-b5d9c7f74-qrcn6 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal Pod vertx-create-span-sidecar-b5d9c7f74-qrcn6 AddedInterface Add eth0 [10.128.2.62/23] from ovn-kubernetes multus logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal Pod vertx-create-span-sidecar-b5d9c7f74-qrcn6.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal Pod vertx-create-span-sidecar-b5d9c7f74-qrcn6.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal Pod vertx-create-span-sidecar-b5d9c7f74-qrcn6.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal Pod vertx-create-span-sidecar-b5d9c7f74-qrcn6.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal Pod vertx-create-span-sidecar-b5d9c7f74-qrcn6.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal Pod vertx-create-span-sidecar-b5d9c7f74-qrcn6.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-b5d9c7f74 SuccessfulCreate Created pod: vertx-create-span-sidecar-b5d9c7f74-qrcn6 replicaset-controller logger.go:42: 07:41:38 | sidecar-skip-webhook | 2025-03-10 07:41:37 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-b5d9c7f74 from 0 to 1 deployment-controller logger.go:42: 07:41:38 | sidecar-skip-webhook | Deleting namespace: kuttl-test-resolved-adder === CONT kuttl/harness/sidecar-deployment logger.go:42: 07:41:45 | sidecar-deployment | Ignoring README.md as it does 
not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:41:45 | sidecar-deployment | Creating namespace: kuttl-test-moral-sheep logger.go:42: 07:41:45 | sidecar-deployment/0-install | starting test step 0-install logger.go:42: 07:41:46 | sidecar-deployment/0-install | Jaeger:kuttl-test-moral-sheep/agent-as-sidecar created logger.go:42: 07:41:52 | sidecar-deployment/0-install | test step completed 0-install logger.go:42: 07:41:52 | sidecar-deployment/1-install | starting test step 1-install logger.go:42: 07:41:52 | sidecar-deployment/1-install | Deployment:kuttl-test-moral-sheep/vertx-create-span-sidecar created logger.go:42: 07:41:54 | sidecar-deployment/1-install | test step completed 1-install logger.go:42: 07:41:54 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection logger.go:42: 07:41:54 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-moral-sheep] logger.go:42: 07:41:54 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 07:41:56 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection logger.go:42: 07:41:56 | sidecar-deployment/3-find-service | starting test step 3-find-service logger.go:42: 07:41:56 | sidecar-deployment/3-find-service | Job:kuttl-test-moral-sheep/00-find-service created logger.go:42: 07:42:10 | sidecar-deployment/3-find-service | test step completed 3-find-service logger.go:42: 07:42:10 | sidecar-deployment/4-other-instance | starting test step 4-other-instance logger.go:42: 07:42:10 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-moral-sheep/agent-as-sidecar2 created logger.go:42: 07:42:15 | sidecar-deployment/4-other-instance | test step completed 4-other-instance logger.go:42: 07:42:15 | sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 07:42:15 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 07:42:15 | sidecar-deployment/6-find-service | starting test step 6-find-service logger.go:42: 07:42:15 | sidecar-deployment/6-find-service | Job:kuttl-test-moral-sheep/01-find-service created logger.go:42: 07:42:27 | sidecar-deployment/6-find-service | test step completed 6-find-service logger.go:42: 07:42:27 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection logger.go:42: 07:42:27 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-moral-sheep] logger.go:42: 07:42:27 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 07:42:30 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection logger.go:42: 07:42:30 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-moral-sheep: logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:50 +0000 UTC Normal Pod agent-as-sidecar-64cdcbd49-d4lxq Binding Scheduled Successfully assigned kuttl-test-moral-sheep/agent-as-sidecar-64cdcbd49-d4lxq to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:50 +0000 UTC Normal Pod agent-as-sidecar-64cdcbd49-d4lxq AddedInterface Add eth0 [10.129.2.84/23] from ovn-kubernetes multus 
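The sidecar-deployment steps above exercise injection at the Deployment level rather than the namespace level: annotating the workload with sidecar.jaegertracing.io/inject=true makes the operator roll out a new ReplicaSet whose pods carry a jaeger-agent container, and flipping the annotation to false rolls the sidecar back out. The annotate commands below are taken verbatim from the test steps; the verification query is an added assumption (any way of listing the pod's containers would do):

    kubectl annotate --overwrite deployment vertx-create-span-sidecar \
        sidecar.jaegertracing.io/inject=true --namespace "$NAMESPACE"
    # Once the operator reconciles, each pod should list two containers:
    #   vertx-create-span-sidecar jaeger-agent
    kubectl get pods --namespace "$NAMESPACE" \
        -o jsonpath='{.items[*].spec.containers[*].name}'
    kubectl annotate --overwrite deployment vertx-create-span-sidecar \
        sidecar.jaegertracing.io/inject=false --namespace "$NAMESPACE"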
logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:50 +0000 UTC Normal Pod agent-as-sidecar-64cdcbd49-d4lxq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:50 +0000 UTC Normal Pod agent-as-sidecar-64cdcbd49-d4lxq.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:50 +0000 UTC Normal Pod agent-as-sidecar-64cdcbd49-d4lxq.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:50 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-64cdcbd49 SuccessfulCreate Created pod: agent-as-sidecar-64cdcbd49-d4lxq replicaset-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:50 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-64cdcbd49 from 0 to 1 deployment-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:52 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-gfm98 Binding Scheduled Successfully assigned kuttl-test-moral-sheep/vertx-create-span-sidecar-cdb84d44f-gfm98 to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:52 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-cdb84d44f SuccessfulCreate Created pod: vertx-create-span-sidecar-cdb84d44f-gfm98 replicaset-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:52 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-cdb84d44f from 0 to 1 deployment-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:53 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-gfm98 AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes multus logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:53 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-gfm98.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:53 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-gfm98.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:53 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-gfm98.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:54 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w Binding Scheduled Successfully assigned kuttl-test-moral-sheep/vertx-create-span-sidecar-74df674bd6-ql87w to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:54 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-74df674bd6 SuccessfulCreate Created pod: vertx-create-span-sidecar-74df674bd6-ql87w replicaset-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:54 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set 
vertx-create-span-sidecar-74df674bd6 from 0 to 1 deployment-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:55 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w AddedInterface Add eth0 [10.131.0.49/23] from ovn-kubernetes multus logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:55 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:55 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:55 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:55 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:55 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:55 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:56 +0000 UTC Normal Pod 00-find-service-rqqj5 Binding Scheduled Successfully assigned kuttl-test-moral-sheep/00-find-service-rqqj5 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:56 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-rqqj5 job-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:57 +0000 UTC Normal Pod 00-find-service-rqqj5 AddedInterface Add eth0 [10.128.2.63/23] from ovn-kubernetes multus logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:57 +0000 UTC Normal Pod 00-find-service-rqqj5.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:58 +0000 UTC Normal Pod 00-find-service-rqqj5.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 781ms (781ms including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:58 +0000 UTC Normal Pod 00-find-service-rqqj5.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:41:58 +0000 UTC Normal Pod 00-find-service-rqqj5.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:01 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-gfm98.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.48:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:01 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-gfm98.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.48:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:03 +0000 UTC Warning Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.49:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:03 +0000 UTC Warning Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.49:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:03 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-gfm98.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:04 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-gfm98.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.48:8080/": read tcp 10.131.0.2:55466->10.131.0.48:8080: read: connection reset by peer kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:05 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:05 +0000 UTC Warning Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.49:8080/": read tcp 10.131.0.2:49240->10.131.0.49:8080: read: connection reset by peer kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:09 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:14 +0000 UTC Normal Pod agent-as-sidecar2-579dffcc4c-sm76p Binding Scheduled Successfully assigned kuttl-test-moral-sheep/agent-as-sidecar2-579dffcc4c-sm76p to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:14 +0000 UTC Normal Pod agent-as-sidecar2-579dffcc4c-sm76p AddedInterface Add eth0 [10.128.2.64/23] from ovn-kubernetes multus logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:14 
+0000 UTC Normal Pod agent-as-sidecar2-579dffcc4c-sm76p.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:14 +0000 UTC Normal Pod agent-as-sidecar2-579dffcc4c-sm76p.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:14 +0000 UTC Normal Pod agent-as-sidecar2-579dffcc4c-sm76p.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:14 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-579dffcc4c SuccessfulCreate Created pod: agent-as-sidecar2-579dffcc4c-sm76p replicaset-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:14 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-579dffcc4c from 0 to 1 deployment-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:15 +0000 UTC Normal Pod 01-find-service-nxrf8 Binding Scheduled Successfully assigned kuttl-test-moral-sheep/01-find-service-nxrf8 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:15 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-nxrf8 job-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:15 +0000 UTC Normal Pod agent-as-sidecar-64cdcbd49-d4lxq.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:15 +0000 UTC Warning Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.49:8080/": read tcp 10.131.0.2:35234->10.131.0.49:8080: read: connection reset by peer kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:15 +0000 UTC Warning Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.49:8080/": dial tcp 10.131.0.49:8080: connect: connection refused kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:15 +0000 UTC Warning Pod vertx-create-span-sidecar-cdb84d44f-gfm98.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.48:8080/": dial tcp 10.131.0.48:8080: connect: connection refused kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:16 +0000 UTC Normal Pod 01-find-service-nxrf8 AddedInterface Add eth0 [10.129.2.85/23] from ovn-kubernetes multus logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:16 +0000 UTC Normal Pod 01-find-service-nxrf8.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:16 +0000 UTC Normal Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9 Binding Scheduled Successfully assigned kuttl-test-moral-sheep/vertx-create-span-sidecar-6b5f6b9cdd-vfjf9 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:16 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6b5f6b9cdd SuccessfulCreate Created pod: vertx-create-span-sidecar-6b5f6b9cdd-vfjf9 replicaset-controller 
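The 00-find-service and 01-find-service Jobs created in these events poll the Jaeger query API until the instrumented service ("order") is reported, which is what gates the 3-find-service and 6-find-service steps. A rough stand-in for that check, assuming the JAEGER_QUERY_ENDPOINT exported during rendering (the actual asserts image, quay.io/rhn_support_ikanse/jaeger-asserts, may perform additional validation):

    # Poll Jaeger's /api/services until the expected service name appears.
    until curl -sf "${JAEGER_QUERY_ENDPOINT}/api/services" | grep -q "\"${SERVICE_NAME}\""; do
        echo "service ${SERVICE_NAME} not reported yet; retrying"
        sleep 5
    done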
logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:16 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-cdb84d44f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-cdb84d44f-gfm98 replicaset-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:16 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-cdb84d44f from 1 to 0 deployment-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:16 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6b5f6b9cdd from 0 to 1 deployment-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod 01-find-service-nxrf8.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.099s (1.099s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod 01-find-service-nxrf8.spec.containers{asserts-container} Created Created container: asserts-container kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod 01-find-service-nxrf8.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9 AddedInterface Add eth0 [10.128.2.65/23] from ovn-kubernetes multus logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:17 +0000 UTC Normal Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:18 +0000 UTC Normal Pod vertx-create-span-sidecar-cdb84d44f-gfm98.spec.containers{vertx-create-span-sidecar} Killing Stopping container vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:25 +0000 UTC Warning Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get 
"http://10.128.2.65:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:25 +0000 UTC Warning Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.65:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:25 +0000 UTC Warning Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.49:8080/": read tcp 10.131.0.2:44156->10.131.0.49:8080: read: connection reset by peer kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:27 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:27 +0000 UTC Normal Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:27 +0000 UTC Warning Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.65:8080/": read tcp 10.128.2.2:55558->10.128.2.65:8080: read: connection reset by peer kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:27 +0000 UTC Warning Pod vertx-create-span-sidecar-6b5f6b9cdd-vfjf9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.65:8080/": dial tcp 10.128.2.65:8080: connect: connection refused kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:27 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-74df674bd6 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-74df674bd6-ql87w replicaset-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:27 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-74df674bd6 from 1 to 0 deployment-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:27 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-bf6497f7c from 0 to 1 deployment-controller logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:28 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:28 +0000 UTC Normal Pod vertx-create-span-sidecar-74df674bd6-ql87w.spec.containers{vertx-create-span-sidecar} Killing Stopping container vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:28 +0000 UTC Normal Pod vertx-create-span-sidecar-bf6497f7c-jl2s4 Binding Scheduled Successfully assigned kuttl-test-moral-sheep/vertx-create-span-sidecar-bf6497f7c-jl2s4 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:28 +0000 UTC Normal Pod vertx-create-span-sidecar-bf6497f7c-jl2s4 AddedInterface Add eth0 [10.129.2.86/23] from ovn-kubernetes multus logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:28 +0000 UTC Normal Pod 
vertx-create-span-sidecar-bf6497f7c-jl2s4.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:28 +0000 UTC Normal Pod vertx-create-span-sidecar-bf6497f7c-jl2s4.spec.containers{vertx-create-span-sidecar} Created Created container: vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:28 +0000 UTC Normal Pod vertx-create-span-sidecar-bf6497f7c-jl2s4.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:42:30 | sidecar-deployment | 2025-03-10 07:42:28 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-bf6497f7c SuccessfulCreate Created pod: vertx-create-span-sidecar-bf6497f7c-jl2s4 replicaset-controller logger.go:42: 07:42:30 | sidecar-deployment | Deleting namespace: kuttl-test-moral-sheep === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (132.62s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.94s) --- PASS: kuttl/harness/sidecar-namespace (57.94s) --- PASS: kuttl/harness/sidecar-skip-webhook (17.42s) --- PASS: kuttl/harness/sidecar-deployment (51.15s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml time="2025-03-10T07:42:37Z" level=debug msg="Setting a new name for the test suites" time="2025-03-10T07:42:37Z" level=debug msg="Removing 'artifacts' TestCase" time="2025-03-10T07:42:37Z" level=debug msg="normalizing test case names" time="2025-03-10T07:42:37Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts" time="2025-03-10T07:42:37Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace" time="2025-03-10T07:42:37Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook" time="2025-03-10T07:42:37Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment" +------------------------------+--------+ | NAME | RESULT | +------------------------------+--------+ | sidecar_artifacts | passed | | sidecar_sidecar_namespace | passed | | sidecar_sidecar_skip_webhook | passed | | sidecar_sidecar_deployment | passed | +------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true + '[' 3 -ne 3 ']' + test_suite_name=streaming + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. 
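Note: the sidecar suite has just finished and the streaming suite is starting here; every suite in this log repeats the same driver sequence from run-e2e-test-suite.sh. Condensed from the set -x traces (a sketch, with $suite standing in for the suite name):

    make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
    make render-e2e-tests-$suite        # gomplate/yq/kustomize render into tests/e2e/$suite/_build
    cd tests/e2e/$suite/_build
    KUBECONFIG=/tmp/kubeconfig-3414875983 kubectl-kuttl test --report xml
    junitcli --suite-name $suite --report \
        --output /logs/artifacts/$suite.xml ./artifacts/kuttl-report.xml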
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/streaming.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-streaming make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=3.6.0 \ SKIP_KAFKA=false \ SKIP_ES_EXTERNAL=true \ ./tests/e2e/streaming/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 46m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 46m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/streaming/render.sh ++ export SUITE_DIR=./tests/e2e/streaming ++ SUITE_DIR=./tests/e2e/streaming ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
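Note: the KAFKA_USE_CUSTOM_PODSET decision above rests on a version_le helper built around sort -V. A minimal reconstruction from the trace (the function body is inferred; only the echo | tr | sort -V | head pipeline appears verbatim in the log):

    # true when $1 <= $2 in version order; version_le 3.6.0 0.25.0 is false,
    # so the script sets KAFKA_USE_CUSTOM_PODSET=true for Kafka 3.6.0
    version_le() {
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }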
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./01-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml + render_smoke_test simple-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=simple-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + export JAEGER_NAME=simple-streaming + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-tls + '[' 1 -ne 1 ']' + test_name=streaming-with-tls + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-tls' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m' Rendering files for test streaming-with-tls + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple + '[' streaming-simple '!=' _build ']' + cd .. 
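Note: render_smoke_test above shows the rendering pattern used throughout: export the per-test variables, run gomplate so the template can read them from the environment, then unset them so they cannot leak into the next test. The essential calls, lifted from the trace (the template's internal references to these variables are an assumption, since the template body is not shown):

    export JAEGER_NAME=simple-streaming
    export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443
    export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268
    /tmp/jaeger-tests/bin/gomplate \
        -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template \
        -o ./05-smoke-test.yaml
    unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT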
+ mkdir -p streaming-with-tls + cd streaming-with-tls + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./01-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + render_smoke_test tls-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=tls-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + export JAEGER_NAME=tls-streaming + JAEGER_NAME=tls-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. 
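Note: render_assert_kafka, seen twice above, emits one kuttl assert file per Strimzi component (zookeeper, kafka, entity-operator) and numbers the steps relative to the base step with expr. A reconstruction consistent with both traces (argument checks trimmed; $T is shorthand for /tmp/jaeger-tests/tests/templates, and the zero-padding shown only holds for single-digit results):

    render_assert_kafka() {
        autoprovisioned=$1; cluster_name=$2; test_step=$3
        replicas=1    # the branch taken in this run
        export CLUSTER_NAME=$cluster_name REPLICAS=$replicas
        gomplate -f $T/assert-zookeeper-cluster.yaml.template -o ./${test_step}-assert.yaml
        gomplate -f $T/assert-kafka-cluster.yaml.template     -o ./0$(expr $test_step + 1)-assert.yaml
        gomplate -f $T/assert-entity-operator.yaml.template   -o ./0$(expr $test_step + 2)-assert.yaml
    }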
+ mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./04-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + version_lt 1.30 1.23 ++ echo 1.30 1.23 ++ tr ' ' '\n' ++ sort -rV ++ head -n 1 + test 1.30 '!=' 1.30 + rm ./08-assert.yaml + skip_test streaming-with-tls 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=streaming-with-tls + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-autoprovisioning-autoscale + '[' streaming-with-autoprovisioning-autoscale '!=' _build ']' + cd .. 
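Note: is_kafka_minimal_enabled above probes three candidate namespaces for the operator pod and reads its KAFKA-PROVISIONING-MINIMAL env var with yq; here it finds true in openshift-distributed-tracing, so replicas stays at 1. Reconstructed almost verbatim from the trace:

    is_kafka_minimal_enabled() {
        namespaces=(observability openshift-operators openshift-distributed-tracing)
        for i in "${namespaces[@]}"; do
            enabled=$(kubectl get pods -n $i -l name=jaeger-operator -o yaml \
                | yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value')
            [ "$enabled" == true ] && return 0
        done
        return 1
    }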
+ rm -rf streaming-with-tls + warning 'streaming-with-tls: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: streaming-with-tls: This test is flaky in Prow CI\e[0m' WAR: streaming-with-tls: This test is flaky in Prow CI + skip_test streaming-simple 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=streaming-simple + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + rm -rf streaming-simple + warning 'streaming-simple: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: streaming-simple: This test is flaky in Prow CI\e[0m' WAR: streaming-simple: This test is flaky in Prow CI make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3414875983 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 2 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === CONT kuttl/harness/artifacts logger.go:42: 07:42:50 | artifacts | Creating namespace: kuttl-test-unique-lacewing logger.go:42: 07:42:50 | artifacts | artifacts events from ns kuttl-test-unique-lacewing: logger.go:42: 07:42:50 | artifacts | Deleting namespace: kuttl-test-unique-lacewing === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-still-kingfish logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 07:42:59 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 07:43:05 | streaming-with-autoprovisioning-autoscale/1-install | 
running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 07:43:05 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 07:43:05 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 07:43:08 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 07:43:08 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 07:43:26 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 1-install logger.go:42: 07:43:26 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install logger.go:42: 07:43:26 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-still-kingfish/auto-provisioned created logger.go:42: 07:43:26 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install logger.go:42: 07:43:26 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3- logger.go:42: 07:44:20 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3- logger.go:42: 07:44:20 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4- logger.go:42: 07:45:10 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4- logger.go:42: 07:45:10 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5- logger.go:42: 07:45:33 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5- logger.go:42: 07:45:33 | streaming-with-autoprovisioning-autoscale/6- | starting test step 6- logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale/6- | test step completed 6- logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7- logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale/7- | test step completed 7- logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-still-kingfish: logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:05 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-still-kingfish/elasticsearch-0 to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:05 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:06 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes multus logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:06 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:13 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 7.026s (7.026s including waiting). Image size: 758467647 bytes. 
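Note: steps 3- through 7- above carry no commands of their own; they are bare assert steps, so kuttl simply polls the cluster until the rendered nn-assert.yaml objects match, or the 600 s per-step timeout expires. Functionally, the Elasticsearch wait is roughly equivalent to the following hand-written loop (a sketch, not what kuttl actually executes):

    until [ "$(kubectl get statefulset elasticsearch -n $NAMESPACE \
            -o jsonpath='{.status.readyReplicas}')" = "1" ]; do
        sleep 5
    done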
kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:13 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:13 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:20 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.131.0.50:9200/": dial tcp 10.131.0.50:9200: connect: connection refused kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:31 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:32 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:32 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:32 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-still-kingfish/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-5f7fd8c697-qt54z_ec50c56a-b0f0-4203-a72e-5d576b547af8 logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:34 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-acb94258-fa0c-41e1-94e3-e92c919cd3e8 ebs.csi.aws.com_aws-ebs-csi-driver-controller-5f7fd8c697-qt54z_ec50c56a-b0f0-4203-a72e-5d576b547af8 logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:35 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-still-kingfish/auto-provisioned-zookeeper-0 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:37 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-acb94258-fa0c-41e1-94e3-e92c919cd3e8" attachdetach-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:46 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.129.2.87/23] from ovn-kubernetes multus logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:46 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-39-rhel9@sha256:f1bfd15900dcde2229711cd765c553bff7e340438c3f984e25d34a8004b45934" kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:58 
+0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-39-rhel9@sha256:f1bfd15900dcde2229711cd765c553bff7e340438c3f984e25d34a8004b45934" in 12.157s (12.157s including waiting). Image size: 642219519 bytes. kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:58 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container: zookeeper kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:43:58 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:21 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:22 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:22 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-still-kingfish/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-5f7fd8c697-qt54z_ec50c56a-b0f0-4203-a72e-5d576b547af8 logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:22 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:24 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-789231a1-06ad-4766-9c75-bbf6df802cc7 ebs.csi.aws.com_aws-ebs-csi-driver-controller-5f7fd8c697-qt54z_ec50c56a-b0f0-4203-a72e-5d576b547af8 logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:25 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-still-kingfish/auto-provisioned-kafka-0 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:27 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-789231a1-06ad-4766-9c75-bbf6df802cc7" attachdetach-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:32 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.128.2.66/23] from ovn-kubernetes multus logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:32 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-39-rhel9@sha256:f1bfd15900dcde2229711cd765c553bff7e340438c3f984e25d34a8004b45934" kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:46 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-39-rhel9@sha256:f1bfd15900dcde2229711cd765c553bff7e340438c3f984e25d34a8004b45934" in 14.668s (14.669s including waiting). Image size: 642219519 bytes. 
kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:46 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container: kafka kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:44:46 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-668448f965-gwfdb Binding Scheduled Successfully assigned kuttl-test-still-kingfish/auto-provisioned-entity-operator-668448f965-gwfdb to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-668448f965-gwfdb AddedInterface Add eth0 [10.128.2.68/23] from ovn-kubernetes multus logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-668448f965-gwfdb.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel9-operator@sha256:f34b9b25ae2228e4599b5a3324bd8edae9ae507df8e7e5c1bec7e1f4eaed86da" already present on machine kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-668448f965-gwfdb.spec.containers{topic-operator} Created Created container: topic-operator kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-668448f965-gwfdb.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-668448f965-gwfdb.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel9-operator@sha256:f34b9b25ae2228e4599b5a3324bd8edae9ae507df8e7e5c1bec7e1f4eaed86da" already present on machine kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:12 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-668448f965 SuccessfulCreate Created pod: auto-provisioned-entity-operator-668448f965-gwfdb replicaset-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:12 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-668448f965 from 0 to 1 deployment-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:13 +0000 UTC Normal Pod auto-provisioned-entity-operator-668448f965-gwfdb.spec.containers{user-operator} Created Created container: user-operator kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:13 +0000 UTC Normal Pod auto-provisioned-entity-operator-668448f965-gwfdb.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal Pod auto-provisioned-collector-5bc76d7486-4w7j7 Binding Scheduled Successfully assigned kuttl-test-still-kingfish/auto-provisioned-collector-5bc76d7486-4w7j7 to ip-10-0-80-231.us-east-2.compute.internal 
default-scheduler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal Pod auto-provisioned-collector-5bc76d7486-4w7j7 AddedInterface Add eth0 [10.131.0.51/23] from ovn-kubernetes multus logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal Pod auto-provisioned-collector-5bc76d7486-4w7j7.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-5bc76d7486 SuccessfulCreate Created pod: auto-provisioned-collector-5bc76d7486-4w7j7 replicaset-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-5bc76d7486 from 0 to 1 deployment-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal Pod auto-provisioned-ingester-8b9f77ccd-45qq4 Binding Scheduled Successfully assigned kuttl-test-still-kingfish/auto-provisioned-ingester-8b9f77ccd-45qq4 to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-8b9f77ccd SuccessfulCreate Created pod: auto-provisioned-ingester-8b9f77ccd-45qq4 replicaset-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-8b9f77ccd from 0 to 1 deployment-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77 Binding Scheduled Successfully assigned kuttl-test-still-kingfish/auto-provisioned-query-845f559f95-5vn77 to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-845f559f95 SuccessfulCreate Created pod: auto-provisioned-query-845f559f95-5vn77 replicaset-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:35 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-845f559f95 from 0 to 1 deployment-controller logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-ingester-8b9f77ccd-45qq4 AddedInterface Add eth0 [10.131.0.52/23] from ovn-kubernetes multus logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-ingester-8b9f77ccd-45qq4.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:b06a13d35b7ff03ed3a5512b64e5a472a8365e99dc5c9ba69e844db2b5284ee8" kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77 AddedInterface Add eth0 [10.129.2.88/23] from ovn-kubernetes multus logger.go:42: 07:45:52 | 
streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:36 +0000 UTC Normal Pod auto-provisioned-query-845f559f95-5vn77.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:40 +0000 UTC Normal Pod auto-provisioned-ingester-8b9f77ccd-45qq4.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:b06a13d35b7ff03ed3a5512b64e5a472a8365e99dc5c9ba69e844db2b5284ee8" in 4.13s (4.13s including waiting). Image size: 137309437 bytes. 
kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:40 +0000 UTC Normal Pod auto-provisioned-ingester-8b9f77ccd-45qq4.spec.containers{jaeger-ingester} Created Created container: jaeger-ingester kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:40 +0000 UTC Normal Pod auto-provisioned-ingester-8b9f77ccd-45qq4.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:41 +0000 UTC Normal Pod auto-provisioned-collector-5bc76d7486-4w7j7.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" in 5.284s (5.284s including waiting). Image size: 139779827 bytes. kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:41 +0000 UTC Normal Pod auto-provisioned-collector-5bc76d7486-4w7j7.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:41 +0000 UTC Normal Pod auto-provisioned-collector-5bc76d7486-4w7j7.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:41 +0000 UTC Warning Pod auto-provisioned-ingester-8b9f77ccd-45qq4.spec.containers{jaeger-ingester} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | 2025-03-10 07:45:50 +0000 UTC Warning 
HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:45:52 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-still-kingfish === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (201.15s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (8.99s) --- PASS: kuttl/harness/streaming-with-autoprovisioning-autoscale (192.03s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml time="2025-03-10T07:46:12Z" level=debug msg="Setting a new name for the test suites" time="2025-03-10T07:46:12Z" level=debug msg="Removing 'artifacts' TestCase" time="2025-03-10T07:46:12Z" level=debug msg="normalizing test case names" time="2025-03-10T07:46:12Z" level=debug msg="streaming/artifacts -> streaming_artifacts" time="2025-03-10T07:46:12Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale" +-----------------------------------------------------+--------+ | NAME | RESULT | +-----------------------------------------------------+--------+ | streaming_artifacts | passed | | streaming_streaming_with_autoprovisioning_autoscale | passed | +-----------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true + '[' 3 -ne 3 ']' + test_suite_name=ui + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/ui.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-ui make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 49m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 49m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/ui/render.sh ++ export SUITE_DIR=./tests/e2e/ui ++ SUITE_DIR=./tests/e2e/ui ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/ui ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test allinone + '[' 1 -ne 1 ']' + test_name=allinone + echo =========================================================================== =========================================================================== + info 'Rendering files for test allinone' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test allinone\e[0m' Rendering files for test allinone + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build + '[' _build '!=' _build ']' + mkdir -p allinone + cd allinone + export GET_URL_COMMAND + export URL + export JAEGER_NAME=all-in-one-ui + JAEGER_NAME=all-in-one-ui + '[' true = true ']' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml + start_test production + '[' 1 -ne 1 ']' + test_name=production + echo =========================================================================== =========================================================================== + info 'Rendering files for test production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test production\e[0m' Rendering files for test production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone + '[' allinone '!=' _build ']' + cd .. 
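
Note on the rendered helper: ensure-ingress-host.sh is generated from tests/templates/ensure-ingress-host.sh.template and produces the "Checking the Ingress host value was populated ... Try number N" lines seen later in this log. Right after a Jaeger CR is created, the OpenShift route usually does not exist yet, so the jsonpath lookup fails until the router populates .status.ingress[0].host. A minimal sketch of that retry loop, assuming NAMESPACE is injected by the harness (illustrative, not the template's verbatim contents):

    # Poll until the first route in the namespace reports an ingress host.
    # NAMESPACE is assumed to be provided by the test harness.
    try=0
    while true; do
      echo "Try number $try"
      host=$(kubectl get routes -n "$NAMESPACE" \
        -o=jsonpath='{.items[0].status.ingress[0].host}') && [ -n "$host" ] && break
      try=$((try + 1))
      sleep 10
    done
    echo "Hostname is $host"
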
+ mkdir -p production + cd production + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + [[ true = true ]] + [[ true = true ]] + render_install_jaeger production-ui production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + '[' true = true ']' + INSECURE=true + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml + INSECURE=true + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml + ASSERT_PRESENT=false + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running ui E2E tests' Running ui E2E tests + cd tests/e2e/ui/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3414875983 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 3 tests === RUN kuttl/harness === RUN kuttl/harness/allinone === PAUSE kuttl/harness/allinone === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/production === PAUSE kuttl/harness/production === CONT kuttl/harness/allinone logger.go:42: 07:46:19 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:46:19 | allinone | Creating namespace: kuttl-test-electric-python logger.go:42: 07:46:19 | allinone/0-install | starting test step 0-install logger.go:42: 07:46:19 | allinone/0-install | Jaeger:kuttl-test-electric-python/all-in-one-ui created logger.go:42: 07:46:23 | allinone/0-install | test step completed 0-install logger.go:42: 07:46:23 | allinone/1-curl | starting test step 1-curl logger.go:42: 07:46:23 | allinone/1-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 07:46:23 | allinone/1-curl | Checking the Ingress host value was populated logger.go:42: 07:46:23 | allinone/1-curl | Try number 0 logger.go:42: 07:46:23 | allinone/1-curl | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 07:46:23 | allinone/1-curl | template was: logger.go:42: 07:46:23 | allinone/1-curl | {.items[0].status.ingress[0].host} logger.go:42: 07:46:23 | allinone/1-curl | object given to jsonpath engine was: logger.go:42: 07:46:23 | allinone/1-curl | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 07:46:23 | allinone/1-curl | logger.go:42: 07:46:23 | allinone/1-curl | logger.go:42: 07:46:33 | allinone/1-curl | Try number 1 logger.go:42: 07:46:33 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-electric-python.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com logger.go:42: 07:46:33 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui] logger.go:42: 07:46:33 | allinone/1-curl | Checking an expected HTTP response logger.go:42: 07:46:33 | allinone/1-curl | Running in OpenShift logger.go:42: 07:46:33 | allinone/1-curl | User not provided. Getting the token... logger.go:42: 07:46:35 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:46:41 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-electric-python.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:46:41 | allinone/1-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 07:46:41 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-electric-python.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:46:41 | allinone/1-curl | HTTP response is 503. 200 expected. 
Waiting 10 s logger.go:42: 07:46:51 | allinone/1-curl | Try number 3/30 the https://all-in-one-ui-kuttl-test-electric-python.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:46:51 | allinone/1-curl | curl response asserted properly logger.go:42: 07:46:51 | allinone/1-curl | test step completed 1-curl logger.go:42: 07:46:51 | allinone/2-delete | starting test step 2-delete logger.go:42: 07:46:51 | allinone/2-delete | Jaeger:kuttl-test-electric-python/all-in-one-ui created logger.go:42: 07:46:51 | allinone/2-delete | test step completed 2-delete logger.go:42: 07:46:51 | allinone/3-install | starting test step 3-install logger.go:42: 07:46:52 | allinone/3-install | Jaeger:kuttl-test-electric-python/all-in-one-ui updated logger.go:42: 07:46:52 | allinone/3-install | test step completed 3-install logger.go:42: 07:46:52 | allinone/4-test-ui-config | starting test step 4-test-ui-config logger.go:42: 07:46:52 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh] logger.go:42: 07:46:52 | allinone/4-test-ui-config | Checking the Ingress host value was populated logger.go:42: 07:46:52 | allinone/4-test-ui-config | Try number 0 logger.go:42: 07:46:52 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 07:46:52 | allinone/4-test-ui-config | template was: logger.go:42: 07:46:52 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host} logger.go:42: 07:46:52 | allinone/4-test-ui-config | object given to jsonpath engine was: logger.go:42: 07:46:52 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 07:46:52 | allinone/4-test-ui-config | logger.go:42: 07:46:52 | allinone/4-test-ui-config | logger.go:42: 07:47:02 | allinone/4-test-ui-config | Try number 1 logger.go:42: 07:47:02 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-electric-python.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com logger.go:42: 07:47:02 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 07:47:02 | allinone/4-test-ui-config | time="2025-03-10T07:47:02Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-electric-python.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search..." 
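
Note on the HTTP assertions: the 1-curl step above shows the pattern implemented by assert-jaeger-http-code.sh: up to 30 attempts, a fallback to insecure TLS after the first failure (the route serves a certificate the test pod does not trust), and a 10 s wait whenever the response code does not match; a 503 here just means the route exists but the backing pods are not ready yet. A behaviourally equivalent sketch with illustrative variable names (not the script's verbatim source):

    URL="https://example-route/search"   # assumption: host resolved by ensure-ingress-host.sh
    EXPECTED_CODE=200
    CURL_OPTS=""
    for try in $(seq 1 30); do
      echo "Try number $try/30 the $URL"
      code=$(curl -s -o /dev/null -w '%{http_code}' $CURL_OPTS "$URL") || code=000
      if [ "$code" = "$EXPECTED_CODE" ]; then
        echo "curl response asserted properly"
        exit 0
      fi
      if [ -z "$CURL_OPTS" ]; then
        # First failure: retry with -k, as the router uses an untrusted certificate.
        echo "Something failed while trying to contact the server. Trying insecure mode"
        CURL_OPTS="-k"
        continue
      fi
      echo "HTTP response is $code. $EXPECTED_CODE expected. Waiting 10 s"
      sleep 10
    done
    exit 1
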
logger.go:42: 07:47:02 | allinone/4-test-ui-config | time="2025-03-10T07:47:02Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 07:47:02 | allinone/4-test-ui-config | time="2025-03-10T07:47:02Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-electric-python.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search" logger.go:42: 07:47:02 | allinone/4-test-ui-config | time="2025-03-10T07:47:02Z" level=info msg="Doing request number 0" logger.go:42: 07:47:02 | allinone/4-test-ui-config | time="2025-03-10T07:47:02Z" level=warning msg="Status code: 503" logger.go:42: 07:47:10 | allinone/4-test-ui-config | time="2025-03-10T07:47:10Z" level=info msg="Doing request number 1" logger.go:42: 07:47:10 | allinone/4-test-ui-config | time="2025-03-10T07:47:10Z" level=info msg="Content found and asserted!" logger.go:42: 07:47:10 | allinone/4-test-ui-config | time="2025-03-10T07:47:10Z" level=info msg="Success!" logger.go:42: 07:47:10 | allinone/4-test-ui-config | test step completed 4-test-ui-config logger.go:42: 07:47:10 | allinone | allinone events from ns kuttl-test-electric-python: logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:23 +0000 UTC Normal Pod all-in-one-ui-bf866bcff-j5nh2 Binding Scheduled Successfully assigned kuttl-test-electric-python/all-in-one-ui-bf866bcff-j5nh2 to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:23 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-bf866bcff SuccessfulCreate Created pod: all-in-one-ui-bf866bcff-j5nh2 replicaset-controller logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:23 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-bf866bcff from 0 to 1 deployment-controller logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:24 +0000 UTC Normal Pod all-in-one-ui-bf866bcff-j5nh2 AddedInterface Add eth0 [10.128.2.69/23] from ovn-kubernetes multus logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:24 +0000 UTC Normal Pod all-in-one-ui-bf866bcff-j5nh2.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:24 +0000 UTC Normal Pod all-in-one-ui-bf866bcff-j5nh2.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:24 +0000 UTC Normal Pod all-in-one-ui-bf866bcff-j5nh2.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:24 +0000 UTC Normal Pod all-in-one-ui-bf866bcff-j5nh2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:24 +0000 UTC Normal Pod all-in-one-ui-bf866bcff-j5nh2.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:24 +0000 UTC Normal Pod all-in-one-ui-bf866bcff-j5nh2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:36 +0000 UTC Normal Pod all-in-one-ui-bf866bcff-j5nh2.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:36 +0000 UTC 
Normal Pod all-in-one-ui-bf866bcff-j5nh2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:36 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-bf866bcff SuccessfulDelete Deleted pod: all-in-one-ui-bf866bcff-j5nh2 replicaset-controller logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:36 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-bf866bcff from 1 to 0 deployment-controller logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz Binding Scheduled Successfully assigned kuttl-test-electric-python/all-in-one-ui-57b95b95b4-976xz to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz AddedInterface Add eth0 [10.129.2.89/23] from ovn-kubernetes multus logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-57b95b95b4 SuccessfulCreate Created pod: all-in-one-ui-57b95b95b4-976xz replicaset-controller logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:38 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-57b95b95b4 from 0 to 1 deployment-controller logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:51 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:51 +0000 UTC Normal Pod all-in-one-ui-57b95b95b4-976xz.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:58 +0000 UTC Normal Pod all-in-one-ui-68ff5d8c9f-j84bt Binding Scheduled Successfully assigned kuttl-test-electric-python/all-in-one-ui-68ff5d8c9f-j84bt to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:58 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-68ff5d8c9f SuccessfulCreate Created pod: all-in-one-ui-68ff5d8c9f-j84bt replicaset-controller logger.go:42: 07:47:10 | 
allinone | 2025-03-10 07:46:58 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-68ff5d8c9f from 0 to 1 deployment-controller logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:59 +0000 UTC Normal Pod all-in-one-ui-68ff5d8c9f-j84bt AddedInterface Add eth0 [10.129.2.90/23] from ovn-kubernetes multus logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:59 +0000 UTC Normal Pod all-in-one-ui-68ff5d8c9f-j84bt.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:fa9aaf9ae026228265ce7f10fb451d5c52a4e0269ce1fa9024cbefddf765ae8d" already present on machine kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:59 +0000 UTC Normal Pod all-in-one-ui-68ff5d8c9f-j84bt.spec.containers{jaeger} Created Created container: jaeger kubelet logger.go:42: 07:47:10 | allinone | 2025-03-10 07:46:59 +0000 UTC Normal Pod all-in-one-ui-68ff5d8c9f-j84bt.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:47:10 | allinone | Deleting namespace: kuttl-test-electric-python === CONT kuttl/harness/production logger.go:42: 07:47:17 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:47:17 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:47:17 | production | Creating namespace: kuttl-test-suitable-ray logger.go:42: 07:47:17 | production/1-install | starting test step 1-install logger.go:42: 07:47:17 | production/1-install | Jaeger:kuttl-test-suitable-ray/production-ui created logger.go:42: 07:47:54 | production/1-install | test step completed 1-install logger.go:42: 07:47:54 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access logger.go:42: 07:47:54 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh] logger.go:42: 07:47:54 | production/2-check-forbbiden-access | Checking the Ingress host value was populated logger.go:42: 07:47:54 | production/2-check-forbbiden-access | Try number 0 logger.go:42: 07:47:54 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com logger.go:42: 07:47:54 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui] logger.go:42: 07:47:54 | production/2-check-forbbiden-access | Checking an expected HTTP response logger.go:42: 07:47:54 | production/2-check-forbbiden-access | Running in OpenShift logger.go:42: 07:47:54 | production/2-check-forbbiden-access | Not using any secret logger.go:42: 07:47:54 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:47:54 | production/2-check-forbbiden-access | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 07:47:54 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:47:54 | production/2-check-forbbiden-access | curl response asserted properly logger.go:42: 07:47:54 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access logger.go:42: 07:47:54 | production/3-curl | starting test step 3-curl logger.go:42: 07:47:54 | production/3-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 07:47:54 | production/3-curl | Checking the Ingress host value was populated logger.go:42: 07:47:54 | production/3-curl | Try number 0 logger.go:42: 07:47:54 | production/3-curl | Hostname is production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com logger.go:42: 07:47:54 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 07:47:54 | production/3-curl | Checking an expected HTTP response logger.go:42: 07:47:54 | production/3-curl | Running in OpenShift logger.go:42: 07:47:54 | production/3-curl | User not provided. Getting the token... logger.go:42: 07:47:56 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:48:03 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:03 | production/3-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 07:48:03 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:03 | production/3-curl | HTTP response is 503. 200 expected. 
Waiting 10 s logger.go:42: 07:48:13 | production/3-curl | Try number 3/30 the https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:13 | production/3-curl | curl response asserted properly logger.go:42: 07:48:13 | production/3-curl | test step completed 3-curl logger.go:42: 07:48:13 | production/4-install | starting test step 4-install logger.go:42: 07:48:13 | production/4-install | Jaeger:kuttl-test-suitable-ray/production-ui updated logger.go:42: 07:48:13 | production/4-install | test step completed 4-install logger.go:42: 07:48:13 | production/5-check-disabled-security | starting test step 5-check-disabled-security logger.go:42: 07:48:13 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh] logger.go:42: 07:48:13 | production/5-check-disabled-security | Checking the Ingress host value was populated logger.go:42: 07:48:13 | production/5-check-disabled-security | Try number 0 logger.go:42: 07:48:13 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com logger.go:42: 07:48:13 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 07:48:13 | production/5-check-disabled-security | Checking an expected HTTP response logger.go:42: 07:48:13 | production/5-check-disabled-security | Running in OpenShift logger.go:42: 07:48:13 | production/5-check-disabled-security | Not using any secret logger.go:42: 07:48:13 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:13 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 07:48:13 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:13 | production/5-check-disabled-security | HTTP response is 403. 200 expected. 
Waiting 10 s logger.go:42: 07:48:23 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:23 | production/5-check-disabled-security | curl response asserted properly logger.go:42: 07:48:24 | production/5-check-disabled-security | test step completed 5-check-disabled-security logger.go:42: 07:48:24 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID logger.go:42: 07:48:24 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 07:48:24 | production/6-check-NO-gaID | Checking the Ingress host value was populated logger.go:42: 07:48:24 | production/6-check-NO-gaID | Try number 0 logger.go:42: 07:48:24 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com logger.go:42: 07:48:24 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 07:48:24 | production/6-check-NO-gaID | time="2025-03-10T07:48:24Z" level=info msg="Querying https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search..." logger.go:42: 07:48:24 | production/6-check-NO-gaID | time="2025-03-10T07:48:24Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 07:48:24 | production/6-check-NO-gaID | time="2025-03-10T07:48:24Z" level=info msg="Polling to https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search" logger.go:42: 07:48:24 | production/6-check-NO-gaID | time="2025-03-10T07:48:24Z" level=info msg="Doing request number 0" logger.go:42: 07:48:24 | production/6-check-NO-gaID | time="2025-03-10T07:48:24Z" level=info msg="Content not found and asserted it was not found!" logger.go:42: 07:48:24 | production/6-check-NO-gaID | time="2025-03-10T07:48:24Z" level=info msg="Success!" 
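
Note on the UI-config checks: 6-check-NO-gaID runs cmd-utils/uiconfig/main.go with ASSERT_PRESENT=false, i.e. it polls the UI page and succeeds only when EXPECTED_CONTENT is absent; 8-check-gaID later inverts the assertion. Stripped of retries and the optional Authorization header, the check reduces to roughly this (the real checker is a Go program; this shell version is only illustrative):

    QUERY_HOSTNAME="https://example-route/search"   # assumption: resolved route host
    EXPECTED_CONTENT=MyTrackingId
    ASSERT_PRESENT=false
    # Fetch the page and record whether the expected content is present.
    if curl -sk "$QUERY_HOSTNAME" | grep -q "$EXPECTED_CONTENT"; then
      found=true
    else
      found=false
    fi
    # Pass only when the observation matches the assertion.
    if [ "$found" = "$ASSERT_PRESENT" ]; then
      echo "Success!"
    else
      echo "The condition of the test function was not accomplished"
      exit 1
    fi
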
logger.go:42: 07:48:24 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID logger.go:42: 07:48:24 | production/7-add-tracking-id | starting test step 7-add-tracking-id logger.go:42: 07:48:24 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE] logger.go:42: 07:48:24 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured logger.go:42: 07:48:24 | production/7-add-tracking-id | test step completed 7-add-tracking-id logger.go:42: 07:48:24 | production/8-check-gaID | starting test step 8-check-gaID logger.go:42: 07:48:24 | production/8-check-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 07:48:24 | production/8-check-gaID | Checking the Ingress host value was populated logger.go:42: 07:48:24 | production/8-check-gaID | Try number 0 logger.go:42: 07:48:25 | production/8-check-gaID | Hostname is production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com logger.go:42: 07:48:25 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 07:48:25 | production/8-check-gaID | time="2025-03-10T07:48:25Z" level=info msg="Querying https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search..." logger.go:42: 07:48:25 | production/8-check-gaID | time="2025-03-10T07:48:25Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 07:48:25 | production/8-check-gaID | time="2025-03-10T07:48:25Z" level=info msg="Polling to https://production-ui-kuttl-test-suitable-ray.apps.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com/search" logger.go:42: 07:48:25 | production/8-check-gaID | time="2025-03-10T07:48:25Z" level=info msg="Doing request number 0" logger.go:42: 07:48:25 | production/8-check-gaID | time="2025-03-10T07:48:25Z" level=warning msg="Found: false . Assert: true" logger.go:42: 07:48:25 | production/8-check-gaID | time="2025-03-10T07:48:25Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 07:48:33 | production/8-check-gaID | time="2025-03-10T07:48:33Z" level=info msg="Doing request number 1" logger.go:42: 07:48:33 | production/8-check-gaID | time="2025-03-10T07:48:33Z" level=info msg="Content found and asserted!" logger.go:42: 07:48:33 | production/8-check-gaID | time="2025-03-10T07:48:33Z" level=info msg="Success!" 
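
Note on 7-add-tracking-id: applying add-tracking-id.yaml makes the operator roll the query deployment, which is why the first poll in 8-check-gaID logs "Found: false" and only the retry succeeds, once the new pod serves the updated UI configuration. The file's contents are not captured in this log; a hedged reconstruction of a Jaeger CR that enables a UI tracking ID (key names follow the Jaeger UI options convention) might look like:

    # Hypothetical reconstruction of add-tracking-id.yaml; not copied from the repo.
    kubectl apply -n "$NAMESPACE" -f - <<'EOF'
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: production-ui
    spec:
      ui:
        options:
          tracking:
            gaID: MyTrackingId
    EOF
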
logger.go:42: 07:48:33 | production/8-check-gaID | test step completed 8-check-gaID logger.go:42: 07:48:33 | production | production events from ns kuttl-test-suitable-ray: logger.go:42: 07:48:33 | production | 2025-03-10 07:47:23 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5c854bf SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch replicaset-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:47:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch Binding Scheduled Successfully assigned kuttl-test-suitable-ray/elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch to ip-10-0-80-231.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:33 | production | 2025-03-10 07:47:23 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestsuitablerayproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5c854bf from 0 to 1 deployment-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:47:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch AddedInterface Add eth0 [10.131.0.53/23] from ovn-kubernetes multus logger.go:42: 07:48:33 | production | 2025-03-10 07:47:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:ebe8dc1ce7ba2f2badccbd4f55a96c60fccdc835fa3582b169ddd34fbf03ca76" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch.spec.containers{elasticsearch} Created Created container: elasticsearch kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:5e4e69d24d07f6efda72b0c0baddfd2979902bfa92eb0a707bd3ed822b7c4a4c" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch.spec.containers{proxy} Created Created container: proxy kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:35 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:41 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsuitablerayproductionui-1-7bf5cd8lch.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:51 +0000 UTC Normal Pod production-ui-collector-6fdf76dd49-d79rh Binding Scheduled Successfully assigned 
kuttl-test-suitable-ray/production-ui-collector-6fdf76dd49-d79rh to ip-10-0-49-237.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:33 | production | 2025-03-10 07:47:51 +0000 UTC Normal ReplicaSet.apps production-ui-collector-6fdf76dd49 SuccessfulCreate Created pod: production-ui-collector-6fdf76dd49-d79rh replicaset-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:47:51 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-6fdf76dd49 from 0 to 1 deployment-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:47:51 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv Binding Scheduled Successfully assigned kuttl-test-suitable-ray/production-ui-query-7cc9bb488f-r9xqv to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:33 | production | 2025-03-10 07:47:51 +0000 UTC Normal ReplicaSet.apps production-ui-query-7cc9bb488f SuccessfulCreate Created pod: production-ui-query-7cc9bb488f-r9xqv replicaset-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:47:51 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-7cc9bb488f from 0 to 1 deployment-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-collector-6fdf76dd49-d79rh AddedInterface Add eth0 [10.129.2.91/23] from ovn-kubernetes multus logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-collector-6fdf76dd49-d79rh.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:40a1c7fa23aa5ffa64e8e22aa38022f5d4d7ff644c46a3da7169b713d486c3c1" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-collector-6fdf76dd49-d79rh.spec.containers{jaeger-collector} Created Created container: jaeger-collector kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-collector-6fdf76dd49-d79rh.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv AddedInterface Add eth0 [10.128.2.70/23] from ovn-kubernetes multus logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod 
production-ui-query-7cc9bb488f-r9xqv.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:47:52 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:00 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:00 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:00 +0000 UTC Normal Pod production-ui-query-7cc9bb488f-r9xqv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:00 +0000 UTC Normal ReplicaSet.apps production-ui-query-7cc9bb488f SuccessfulDelete Deleted pod: production-ui-query-7cc9bb488f-r9xqv replicaset-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:00 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-7cc9bb488f from 1 to 0 deployment-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:01 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq Binding Scheduled Successfully assigned kuttl-test-suitable-ray/production-ui-query-f4b67d76f-vtrlq to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:33 | production | 2025-03-10 07:48:01 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq AddedInterface Add eth0 [10.128.2.71/23] from ovn-kubernetes multus logger.go:42: 07:48:33 | production | 2025-03-10 07:48:01 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:01 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:01 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:01 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:234af927030921ab8f7333f61f967b4b4dee37a1b3cf85689e9e63240dd62800" already present on machine kubelet logger.go:42: 07:48:33 | production | 
2025-03-10 07:48:01 +0000 UTC Normal ReplicaSet.apps production-ui-query-f4b67d76f SuccessfulCreate Created pod: production-ui-query-f4b67d76f-vtrlq replicaset-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:01 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-f4b67d76f from 0 to 1 deployment-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:02 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{oauth-proxy} Created Created container: oauth-proxy kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:02 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:02 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:02 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:02 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:08 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:48:33 | production | 2025-03-10 07:48:08 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod production-ui-collector-6fdf76dd49-d79rh horizontal-pod-autoscaler logger.go:42: 07:48:33 | production | 2025-03-10 07:48:08 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:48:33 | production | 2025-03-10 07:48:14 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-f4b67d76f from 1 to 0 deployment-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:15 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:15 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:15 +0000 UTC Normal Pod production-ui-query-f4b67d76f-vtrlq.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:15 +0000 UTC Normal ReplicaSet.apps production-ui-query-f4b67d76f SuccessfulDelete Deleted pod: production-ui-query-f4b67d76f-vtrlq replicaset-controller logger.go:42: 07:48:33 | production | 
2025-03-10 07:48:16 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq Binding Scheduled Successfully assigned kuttl-test-suitable-ray/production-ui-query-686c99dc54-n5stq to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:33 | production | 2025-03-10 07:48:16 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq AddedInterface Add eth0 [10.128.2.72/23] from ovn-kubernetes multus logger.go:42: 07:48:33 | production | 2025-03-10 07:48:16 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:16 +0000 UTC Normal ReplicaSet.apps production-ui-query-686c99dc54 SuccessfulCreate Created pod: production-ui-query-686c99dc54-n5stq replicaset-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:16 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-686c99dc54 from 0 to 1 deployment-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:17 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:17 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:17 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:17 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:17 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:23 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod production-ui-collector-6fdf76dd49-d79rh horizontal-pod-autoscaler logger.go:42: 07:48:33 | production | 2025-03-10 07:48:23 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod production-ui-collector-6fdf76dd49-d79rh horizontal-pod-autoscaler logger.go:42: 07:48:33 | production | 2025-03-10 07:48:26 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:26 +0000 UTC Normal Pod production-ui-query-686c99dc54-n5stq.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:26 +0000 UTC Normal ReplicaSet.apps production-ui-query-686c99dc54 SuccessfulDelete Deleted pod: 
production-ui-query-686c99dc54-n5stq replicaset-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:26 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-686c99dc54 from 1 to 0 deployment-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:27 +0000 UTC Normal Pod production-ui-query-54b95dfd79-76g4p Binding Scheduled Successfully assigned kuttl-test-suitable-ray/production-ui-query-54b95dfd79-76g4p to ip-10-0-101-63.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:33 | production | 2025-03-10 07:48:27 +0000 UTC Normal Pod production-ui-query-54b95dfd79-76g4p AddedInterface Add eth0 [10.128.2.73/23] from ovn-kubernetes multus logger.go:42: 07:48:33 | production | 2025-03-10 07:48:27 +0000 UTC Normal Pod production-ui-query-54b95dfd79-76g4p.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:84020ce29bcb5a4bd018e6596188ae919c5cd600e08f78a546c0e76ea477685e" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:27 +0000 UTC Normal ReplicaSet.apps production-ui-query-54b95dfd79 SuccessfulCreate Created pod: production-ui-query-54b95dfd79-76g4p replicaset-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:27 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-54b95dfd79 from 0 to 1 deployment-controller logger.go:42: 07:48:33 | production | 2025-03-10 07:48:28 +0000 UTC Normal Pod production-ui-query-54b95dfd79-76g4p.spec.containers{jaeger-query} Created Created container: jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:28 +0000 UTC Normal Pod production-ui-query-54b95dfd79-76g4p.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:28 +0000 UTC Normal Pod production-ui-query-54b95dfd79-76g4p.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:65342c7d622f336741a8ae0b9dead79c7ecabc155d02cdd42b7b49ca36680e74" already present on machine kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:28 +0000 UTC Normal Pod production-ui-query-54b95dfd79-76g4p.spec.containers{jaeger-agent} Created Created container: jaeger-agent kubelet logger.go:42: 07:48:33 | production | 2025-03-10 07:48:28 +0000 UTC Normal Pod production-ui-query-54b95dfd79-76g4p.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:48:33 | production | Deleting namespace: kuttl-test-suitable-ray === CONT kuttl/harness/artifacts logger.go:42: 07:48:39 | artifacts | Creating namespace: kuttl-test-amusing-grubworm logger.go:42: 07:48:39 | artifacts | artifacts events from ns kuttl-test-amusing-grubworm: logger.go:42: 07:48:39 | artifacts | Deleting namespace: kuttl-test-amusing-grubworm === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (146.63s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/allinone (57.77s) --- PASS: kuttl/harness/production (82.70s) --- PASS: kuttl/harness/artifacts (6.01s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml time="2025-03-10T07:48:46Z" level=debug msg="Setting a new name for the test 
suites" time="2025-03-10T07:48:46Z" level=debug msg="Removing 'artifacts' TestCase" time="2025-03-10T07:48:46Z" level=debug msg="normalizing test case names" time="2025-03-10T07:48:46Z" level=debug msg="ui/allinone -> ui_allinone" time="2025-03-10T07:48:46Z" level=debug msg="ui/production -> ui_production" time="2025-03-10T07:48:46Z" level=debug msg="ui/artifacts -> ui_artifacts" +---------------+--------+ | NAME | RESULT | +---------------+--------+ | ui_allinone | passed | | ui_production | passed | | ui_artifacts | passed | +---------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true + '[' 3 -ne 3 ']' + test_suite_name=upgrade + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/upgrade.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-upgrade make[2]: Entering directory '/tmp/jaeger-tests' make docker JAEGER_VERSION=1.65.1 IMG="quay.io//jaeger-operator:next" make[3]: Entering directory '/tmp/jaeger-tests' [ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.65.0" --build-arg=JAEGER_VERSION=1.65.1 --build-arg=JAEGER_AGENT_VERSION="1.62.0" --build-arg=TARGETARCH= --build-arg VERSION_DATE=2025-03-10T07:48:46Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" . make[3]: Leaving directory '/tmp/jaeger-tests' touch build-e2e-upgrade-image SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.65.0" JAEGER_OPERATOR_VERSION="1.65.0" JAEGER_VERSION="1.65.0" ./tests/e2e/upgrade/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 52m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.19.0-0.nightly-2025-03-09-063419 True False 52m Cluster version is 4.19.0-0.nightly-2025-03-09-063419' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/upgrade/render.sh ++ export SUITE_DIR=./tests/e2e/upgrade ++ SUITE_DIR=./tests/e2e/upgrade ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/upgrade ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts
+ export JAEGER_NAME
+ '[' true = true ']'
+ skip_test upgrade 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']' + test_name=upgrade + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build + '[' _build '!=' _build ']' + rm -rf upgrade + warning 'upgrade: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m'
WAR: upgrade: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test upgrade-from-latest-release 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']' + test_name=upgrade-from-latest-release + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build + '[' _build '!=' _build ']' + rm -rf upgrade-from-latest-release + warning 'upgrade-from-latest-release: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m'
WAR: upgrade-from-latest-release: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running upgrade E2E tests'
Running upgrade E2E tests
+ cd tests/e2e/upgrade/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3414875983
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-9xj46nm0-d238c.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 600 seconds for each step
harness.go:372: testsuite: . has 1 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT kuttl/harness/artifacts
logger.go:42: 07:48:47 | artifacts | Creating namespace: kuttl-test-robust-dolphin
logger.go:42: 07:48:47 | artifacts | artifacts events from ns kuttl-test-robust-dolphin:
logger.go:42: 07:48:47 | artifacts | Deleting namespace: kuttl-test-robust-dolphin
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (6.04s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.90s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2025-03-10T07:48:54Z" level=debug msg="Setting a new name for the test suites"
time="2025-03-10T07:48:54Z" level=debug msg="Removing 'artifacts' TestCase"
time="2025-03-10T07:48:54Z" level=debug msg="normalizing test case names"
time="2025-03-10T07:48:54Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
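
Note on the upgrade suite: it ends up containing only the synthetic artifacts test because both real tests are removed by skip_test when IS_OPENSHIFT=true. From the trace, skip_test deletes the rendered test directory and prints a coloured warning; a minimal reimplementation consistent with that trace (function bodies inferred, not copied from the repo):

    warning() {
      [ $# -ne 1 ] && return 1
      echo -e "\e[1;33mWAR: $1\e[0m"
    }

    skip_test() {
      [ $# -ne 2 ] && return 1
      test_name=$1
      message=$2
      rm -rf "$test_name"            # drop the rendered kuttl test folder
      warning "$test_name: $message"
    }

    skip_test upgrade 'Test not supported in OpenShift'
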