Installing kuttl Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64 KUBECONFIG file is: /tmp/kubeconfig-894131460 for suite in elasticsearch examples generate miscellaneous sidecar streaming ui upgrade; do \ make run-e2e-tests-$suite ; \ done make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true + '[' 3 -ne 3 ']' + test_suite_name=elasticsearch + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/elasticsearch.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-elasticsearch make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true \ KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ ./tests/e2e/elasticsearch/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 8m26s Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 8m26s Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/elasticsearch/render.sh ++ export SUITE_DIR=./tests/e2e/elasticsearch ++ SUITE_DIR=./tests/e2e/elasticsearch ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + start_test es-from-aio-to-production + '[' 1 -ne 1 ']' + test_name=es-from-aio-to-production + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-from-aio-to-production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m' Rendering files for test es-from-aio-to-production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-from-aio-to-production + cd es-from-aio-to-production + jaeger_name=my-jaeger + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 03 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=03 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i 
'.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml + render_smoke_test my-jaeger true 04 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test es-increasing-replicas + '[' 1 -ne 1 ']' + test_name=es-increasing-replicas + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-increasing-replicas' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m' Rendering files for test es-increasing-replicas + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production + '[' es-from-aio-to-production '!=' _build ']' + cd .. + mkdir -p es-increasing-replicas + cd es-increasing-replicas + jaeger_name=simple-prod + '[' true = true ']' + jaeger_deployment_mode=production_autoprovisioned + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml + cp ./01-assert.yaml ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml + render_smoke_test simple-prod true 03 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=03 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + 
JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + cp ./02-install.yaml ./04-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml + /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml + '[' true = true ']' + skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas + '[' es-increasing-replicas '!=' _build ']' + cd .. + rm -rf es-index-cleaner-upstream + warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_index_cleaner -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-index-cleaner-autoprov + '[' 1 -ne 1 ']' + test_name=es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-index-cleaner-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m' Rendering files for test es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-index-cleaner-autoprov + cd es-index-cleaner-autoprov + jaeger_name=test-es-index-cleaner-with-prefix + cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md . 
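The rendering steps traced above all follow one idiom: export the variables a template needs, run gomplate to materialize a numbered kuttl step file, then derive variants by copying that file and patching it in place with yq. A condensed sketch of the pattern, with illustrative template and file names rather than the exact helpers from the suite:

  # Render a step: gomplate substitutes the exported environment variables.
  export JAEGER_NAME=simple-prod
  gomplate -f production-jaeger-install.yaml.template -o ./01-install.yaml
  unset JAEGER_NAME

  # Derive the scale-up step from the install step with in-place yq (v4) edits.
  cp ./01-install.yaml ./02-install.yaml
  yq e -i '.spec.collector.replicas = 2' ./02-install.yaml
  yq e -i '.spec.query.replicas = 2' ./02-install.yaml

  # The matching assert file then requires the rollout to reach two ready replicas.
  cp ./01-assert.yaml ./02-assert.yaml
  yq e -i '.spec.replicas = 2' ./02-assert.yaml
  yq e -i '.status.readyReplicas = 2' ./02-assert.yaml
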
+ render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml + render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02 + '[' 6 -ne 6 ']' + jaeger=test-es-index-cleaner-with-prefix + is_secured=true + number_of_spans=5 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=5 + DAYS=5 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + sed 's~enabled: false~enabled: true~gi' ./01-install.yaml + CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml + /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=00 + test_step=06 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', 
'\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=test-es-index-cleaner-with-prefix-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.7.7 ++ version_ge 5.7.7 5.4 +++ echo 5.7.7 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.7.7 == 5.7.7 + '[' -n '' ']' + skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov + '[' es-index-cleaner-autoprov '!=' _build ']' + cd ..
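The skip of es-index-cleaner-managed being processed here rests on probing the Elasticsearch OpenShift Operator: the suite reads the OLM properties annotation off the operator pod, extracts the packaged version with yq, and compares versions with GNU sort. A sketch approximating the helpers seen in the trace (not the verbatim source):

  # Read the OLM properties annotation from the elasticsearch-operator pod.
  properties=$(kubectl get pods -l name=elasticsearch-operator --all-namespaces \
    -o=jsonpath='{.items[0].metadata.annotations.operatorframework\.io/properties}')

  # The annotation is a JSON blob; yq v4 parses it and pulls out the version.
  ESO_OPERATOR_VERSION=$(echo "$properties" | yq e -P \
    '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version')

  # version_ge A B succeeds when A >= B under version ordering (sort -V).
  version_ge() {
    test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" == "$1"
  }

  version_ge "$ESO_OPERATOR_VERSION" 5.4 && echo "managed ES tests are supported"

Note that in this run the version check itself passes (5.7.7 >= 5.4); the skip is triggered by the following '[' -n '' ']' guard on an unset flag.
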
+ rm -rf es-index-cleaner-managed + warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + start_test es-multiinstance + '[' 1 -ne 1 ']' + test_name=es-multiinstance + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-multiinstance' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m' Rendering files for test es-multiinstance + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-multiinstance + cd es-multiinstance + jaeger_name=instance-1 + render_install_jaeger instance-1 production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=instance-1 + JAEGER_NAME=instance-1 + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml + '[' true = true ']' + skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-rollover-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance + '[' es-multiinstance '!=' _build ']' + cd .. 
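Every render_check_indices call in this suite (step 06 of the index-cleaner test above, steps 03 through 12 of the rollover test below) funnels its parameters through the same escaping helper: the check patterns contain regex backslashes such as \d{4}, which must be doubled so they survive gomplate rendering into the job YAML. Roughly, as a hedged sketch rather than the exact helper:

  escape_command() {
    # Double every backslash so '\d{4}' reaches the rendered job as '\\d{4}'.
    echo "$1" | sed 's/\\/\\\\/g'
  }

  CMD_PARAMETERS=$(escape_command "'--pattern', 'jaeger-span-\d{4}-\d{2}-\d{2}', '--assert-exist',")
  export CMD_PARAMETERS JOB_NUMBER=00 MOUNT_SECRET=my-jaeger-curator
  gomplate -f check-indices.yaml.template -o ./03-check-indices.yaml
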
+ rm -rf es-rollover-upstream + warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_rollover -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-rollover-autoprov + '[' 1 -ne 1 ']' + test_name=es-rollover-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-rollover-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m' Rendering files for test es-rollover-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-rollover-autoprov + cd es-rollover-autoprov + cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md . + jaeger_name=my-jaeger + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_report_spans my-jaeger true 2 00 true 02 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset 
JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=00 + test_step=03 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=01 + test_step=04 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=01 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml + JOB_NUMBER=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml + render_report_spans my-jaeger true 2 02 true 06 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=02 + ensure_reported_spans=true + test_step=06 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=02 + JOB_NUMBER=02 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export 
JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=02 + test_step=07 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=02 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml + JOB_NUMBER=02 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + job_number=03 + test_step=08 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=03 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml + JOB_NUMBER=03 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + job_number=04 + 
test_step=09 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=04 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml + JOB_NUMBER=04 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml + render_report_spans my-jaeger true 2 03 true 10 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=03 + ensure_reported_spans=true + test_step=10 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=03 + JOB_NUMBER=03 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + CRONJOB_NAME=my-jaeger-es-rollover + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'',' + job_number=05 + test_step=11 + escape_command ''\''--name'\'', '\''jaeger-span-000002'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-000002'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-000002'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=05 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml + JOB_NUMBER=05 + /tmp/jaeger-tests/bin/gomplate -f 
/tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + job_number=06 + test_step=12 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=06 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml + JOB_NUMBER=06 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.7.7 ++ version_ge 5.7.7 5.4 +++ echo 5.7.7 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.7.7 == 5.7.7 + '[' -n '' ']' + skip_test es-rollover-managed 'Test only supported with Elasticsearch OpenShift
Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-rollover-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov + '[' es-rollover-autoprov '!=' _build ']' + cd .. + rm -rf es-rollover-managed + warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + skip_test es-spark-dependencies 'This test is not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=es-spark-dependencies + message='This test is not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + rm -rf es-spark-dependencies + warning 'es-spark-dependencies: This test is not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m' WAR: es-spark-dependencies: This test is not supported in OpenShift + [[ true = true ]] + [[ false = false ]] + start_test es-streaming-autoprovisioned + '[' 1 -ne 1 ']' + test_name=es-streaming-autoprovisioned + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-streaming-autoprovisioned' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-streaming-autoprovisioned\e[0m' Rendering files for test es-streaming-autoprovisioned + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-streaming-autoprovisioned + cd es-streaming-autoprovisioned + jaeger_name=auto-provisioned + render_assert_kafka true auto-provisioned 00 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=00 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 +
CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_smoke_test auto-provisioned true 04 + '[' 3 -ne 3 ']' + jaeger=auto-provisioned + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 + export JAEGER_NAME=auto-provisioned + JAEGER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running elasticsearch E2E tests' Running elasticsearch E2E tests + cd tests/e2e/elasticsearch/_build + set +e + KUBECONFIG=/tmp/kubeconfig-894131460 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 8 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/es-from-aio-to-production === PAUSE kuttl/harness/es-from-aio-to-production === RUN kuttl/harness/es-increasing-replicas === PAUSE kuttl/harness/es-increasing-replicas === RUN kuttl/harness/es-index-cleaner-autoprov === PAUSE kuttl/harness/es-index-cleaner-autoprov === RUN kuttl/harness/es-multiinstance === PAUSE kuttl/harness/es-multiinstance === RUN kuttl/harness/es-rollover-autoprov === PAUSE kuttl/harness/es-rollover-autoprov === RUN kuttl/harness/es-simple-prod === PAUSE kuttl/harness/es-simple-prod === RUN kuttl/harness/es-streaming-autoprovisioned === PAUSE kuttl/harness/es-streaming-autoprovisioned === CONT kuttl/harness/artifacts logger.go:42: 07:57:16 | artifacts | Creating namespace: kuttl-test-enough-oryx logger.go:42: 07:57:16 | artifacts | artifacts events from ns kuttl-test-enough-oryx: logger.go:42: 07:57:16 | artifacts | Deleting namespace: kuttl-test-enough-oryx === CONT kuttl/harness/es-multiinstance logger.go:42: 07:57:22 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:57:22 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:57:22 | es-multiinstance | Creating namespace: kuttl-test-trusted-anchovy logger.go:42: 07:57:22 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace logger.go:42: 07:57:22 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true] logger.go:42: 07:57:22 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace 
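From here the rendered suite is handed to kuttl. The harness only treats files matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ as test steps, which is why 03-create-second-instance.yaml.template and README.md are reported as ignored: templates are rendered into numbered YAML steps first, and each numeric prefix pairs an action file with its assert file. The invocation driving the run above amounts to:

  cd /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
  # kuttl picks up kuttl-test.yaml from the working directory; --report xml
  # emits a JUnit-style report for the CI artifacts directory.
  KUBECONFIG=/tmp/kubeconfig-894131460 kubectl-kuttl test --report xml
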
logger.go:42: 07:57:22 | es-multiinstance/1-install | starting test step 1-install logger.go:42: 07:57:22 | es-multiinstance/1-install | Jaeger:kuttl-test-trusted-anchovy/instance-1 created logger.go:42: 07:58:11 | es-multiinstance/1-install | test step completed 1-install logger.go:42: 07:58:11 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace logger.go:42: 07:58:11 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test] logger.go:42: 07:58:11 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created logger.go:42: 07:58:11 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace logger.go:42: 07:58:11 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance logger.go:42: 07:58:11 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test] logger.go:42: 07:58:13 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created logger.go:42: 07:58:13 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000] logger.go:42: 07:59:02 | es-multiinstance/3-create-second-instance | assert is valid logger.go:42: 07:59:02 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance logger.go:42: 07:59:02 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets logger.go:42: 07:59:02 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1] logger.go:42: 07:59:02 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2] logger.go:42: 07:59:02 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0] logger.go:42: 07:59:02 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets logger.go:42: 07:59:02 | es-multiinstance/5-delete | starting test step 5-delete logger.go:42: 07:59:02 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false] logger.go:42: 07:59:02 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted logger.go:42: 07:59:02 | es-multiinstance/5-delete | test step completed 5-delete logger.go:42: 07:59:02 | es-multiinstance | es-multiinstance events from ns kuttl-test-trusted-anchovy: logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl Binding Scheduled Successfully assigned kuttl-test-trusted-anchovy/elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl AddedInterface Add eth0 [10.128.2.21/23] from ovn-kubernetes logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl.spec.containers{elasticsearch} Pulling Pulling image 
"registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:29 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-68649fd55b SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl replicaset-controller logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:29 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-68649fd55b to 1 deployment-controller logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" in 6.174s (6.174s including waiting) kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" in 3.23s (3.23s including waiting) kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:57:50 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesttrustedanchovyinstance1-1-686494h5vl.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:00 +0000 UTC Normal Pod instance-1-collector-899cdd544-r9ldb Binding Scheduled Successfully assigned kuttl-test-trusted-anchovy/instance-1-collector-899cdd544-r9ldb to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:00 +0000 UTC Normal ReplicaSet.apps instance-1-collector-899cdd544 SuccessfulCreate Created pod: instance-1-collector-899cdd544-r9ldb replicaset-controller logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:00 +0000 UTC Normal Deployment.apps instance-1-collector 
ScalingReplicaSet Scaled up replica set instance-1-collector-899cdd544 to 1 deployment-controller logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:00 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq Binding Scheduled Successfully assigned kuttl-test-trusted-anchovy/instance-1-query-7678c98cc-mh5kq to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:00 +0000 UTC Normal ReplicaSet.apps instance-1-query-7678c98cc SuccessfulCreate Created pod: instance-1-query-7678c98cc-mh5kq replicaset-controller logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:00 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-7678c98cc to 1 deployment-controller logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:01 +0000 UTC Normal Pod instance-1-collector-899cdd544-r9ldb AddedInterface Add eth0 [10.131.0.23/23] from ovn-kubernetes logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:01 +0000 UTC Normal Pod instance-1-collector-899cdd544-r9ldb.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:01 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq AddedInterface Add eth0 [10.129.2.15/23] from ovn-kubernetes logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:01 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-collector-899cdd544-r9ldb.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" in 4.77s (4.77s including waiting) kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-collector-899cdd544-r9ldb.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-collector-899cdd544-r9ldb.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" in 5.071s (5.071s including waiting) kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{oauth-proxy} Pulled Container image 
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:06 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:08 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" in 2.189s (2.189s including waiting) kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:08 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:08 +0000 UTC Normal Pod instance-1-query-7678c98cc-mh5kq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:27 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:59:02 | es-multiinstance | 2023-11-13 07:58:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod 
logger.go:42: 07:59:02 | es-multiinstance | Deleting namespace: kuttl-test-trusted-anchovy
=== CONT kuttl/harness/es-streaming-autoprovisioned
logger.go:42: 07:59:09 | es-streaming-autoprovisioned | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:59:09 | es-streaming-autoprovisioned | Creating namespace: kuttl-test-sensible-owl
logger.go:42: 07:59:09 | es-streaming-autoprovisioned/0-install | starting test step 0-install
logger.go:42: 07:59:09 | es-streaming-autoprovisioned/0-install | Jaeger:kuttl-test-sensible-owl/auto-provisioned created
logger.go:42: 08:00:40 | es-streaming-autoprovisioned/0-install | test step completed 0-install
logger.go:42: 08:00:40 | es-streaming-autoprovisioned/1- | starting test step 1-
logger.go:42: 08:01:10 | es-streaming-autoprovisioned/1- | test step completed 1-
logger.go:42: 08:01:10 | es-streaming-autoprovisioned/2- | starting test step 2-
logger.go:42: 08:01:52 | es-streaming-autoprovisioned/2- | test step completed 2-
logger.go:42: 08:01:52 | es-streaming-autoprovisioned/3- | starting test step 3-
logger.go:42: 08:01:58 | es-streaming-autoprovisioned/3- | test step completed 3-
logger.go:42: 08:01:58 | es-streaming-autoprovisioned/4-smoke-test | starting test step 4-smoke-test
logger.go:42: 08:01:58 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provisioned /dev/null]
logger.go:42: 08:02:01 | es-streaming-autoprovisioned/4-smoke-test | Warning: resource jaegers/auto-provisioned is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
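The last-applied-configuration warning above appears because the Jaeger resource was created imperatively, leaving kubectl apply with no saved state to diff against; kubectl patches the annotation in and continues. A minimal sketch of the two creation paths that avoid the warning (jaeger.yaml is a placeholder manifest, not a file from this run):

    # Create declaratively so a later apply finds the annotation:
    kubectl create -f jaeger.yaml --save-config -n $NAMESPACE
    # ...or use apply from the start, which writes the annotation itself:
    kubectl apply -f jaeger.yaml -n $NAMESPACE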
logger.go:42: 08:02:08 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:02:08 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:02:10 | es-streaming-autoprovisioned/4-smoke-test | job.batch/report-span created
logger.go:42: 08:02:10 | es-streaming-autoprovisioned/4-smoke-test | job.batch/check-span created
logger.go:42: 08:02:25 | es-streaming-autoprovisioned/4-smoke-test | test step completed 4-smoke-test
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | es-streaming-autoprovisioned events from ns kuttl-test-sensible-owl:
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:33 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cfb879d96 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7 replicaset-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7 Binding Scheduled Successfully assigned kuttl-test-sensible-owl/elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7 to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:33 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7 FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:33 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cfb879d96 to 1 deployment-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7 AddedInterface Add eth0 [10.128.2.22/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
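The 4-smoke-test step above renders a Job manifest from a shared gomplate template and applies it, which produces the report-span and check-span Jobs that appear in the events below. Reflowed for readability, the render-and-apply pair from this run is:

    ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 \
    JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 \
    MOUNT_SECRET=e2e-test \
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n $NAMESPACE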
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:44 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 07:59:49 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:02 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:03 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:03 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-sensible-owl/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_a95d5e43-dac0-4be3-9233-4b6d6f41a8b3
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:03 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
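The two Unhealthy events above are routine while Elasticsearch bootstraps: the readiness probe polls the node until it accepts HTTP, and response code 000 just means nothing is listening yet. A hypothetical manual spot check against the pod from this run, assuming curl is available in the image; the cluster may require TLS or auth, in which case the probe script baked into the image is the authoritative check:

    # Ask the elasticsearch container whether it accepts HTTP yet (prints the status code).
    kubectl exec -n kuttl-test-sensible-owl \
      elasticsearch-cdm-kuttltestsensibleowlautoprovisioned-1-cftdzr7 \
      -c elasticsearch -- curl -s -o /dev/null -w '%{http_code}\n' http://localhost:9200/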
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:06 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-9bebdd21-e294-44e2-87a7-e252dba2df12 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_a95d5e43-dac0-4be3-9233-4b6d6f41a8b3
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:07 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-sensible-owl/auto-provisioned-zookeeper-0 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:09 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-9bebdd21-e294-44e2-87a7-e252dba2df12" attachdetach-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:10 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.129.2.19/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:10 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:20 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" in 10.323s (10.323s including waiting) kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:21 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:21 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:41 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:42 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:42 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-sensible-owl/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_a95d5e43-dac0-4be3-9233-4b6d6f41a8b3
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:42 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
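Both the ZooKeeper and Kafka claims follow the WaitForFirstConsumer pattern visible above: the PVC stays Pending until its pod is scheduled, then the EBS CSI driver provisions and attaches a volume. A simple way to watch that transition in this namespace:

    # Claims go Pending (WaitForFirstConsumer) -> Bound once the consuming pod is scheduled.
    kubectl get pvc -n kuttl-test-sensible-owl -w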
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:45 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-a93c7af3-cfa2-4dfc-bf03-08eef8b1a89f ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_a95d5e43-dac0-4be3-9233-4b6d6f41a8b3
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:46 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-sensible-owl/auto-provisioned-kafka-0 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:48 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a93c7af3-cfa2-4dfc-bf03-08eef8b1a89f" attachdetach-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:49 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.129.2.20/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:49 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:49 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:00:49 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:10 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-7c89c5f75 SuccessfulCreate Created pod: auto-provisioned-entity-operator-7c89c5f75-crxqs replicaset-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:10 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-7c89c5f75 to 1 deployment-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs Binding Scheduled Successfully assigned kuttl-test-sensible-owl/auto-provisioned-entity-operator-7c89c5f75-crxqs to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs AddedInterface Add eth0 [10.131.0.25/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{topic-operator} Pulling Pulling image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{topic-operator} Pulled Successfully pulled image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" in 8.868s (8.868s including waiting) kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{tls-sidecar} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:29 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{tls-sidecar} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" in 9.036s (9.036s including waiting) kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:29 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:29 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c89c5f75-crxqs.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:52 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-6fc6fc6b58 to 1 deployment-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:52 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-5948967dd4 to 1 deployment-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Pod auto-provisioned-collector-7654c77b64-p9gd9 Binding Scheduled Successfully assigned kuttl-test-sensible-owl/auto-provisioned-collector-7654c77b64-p9gd9 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Pod auto-provisioned-collector-7654c77b64-p9gd9 AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Pod auto-provisioned-collector-7654c77b64-p9gd9.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Pod auto-provisioned-collector-7654c77b64-p9gd9.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Pod auto-provisioned-collector-7654c77b64-p9gd9.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-7654c77b64 SuccessfulCreate Created pod: auto-provisioned-collector-7654c77b64-p9gd9 replicaset-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-7654c77b64 to 1 deployment-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Pod auto-provisioned-ingester-6fc6fc6b58-md9hx Binding Scheduled Successfully assigned kuttl-test-sensible-owl/auto-provisioned-ingester-6fc6fc6b58-md9hx to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Pod auto-provisioned-ingester-6fc6fc6b58-md9hx AddedInterface Add eth0 [10.131.0.26/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Pod auto-provisioned-ingester-6fc6fc6b58-md9hx.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-6fc6fc6b58 SuccessfulCreate Created pod: auto-provisioned-ingester-6fc6fc6b58-md9hx replicaset-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k Binding Scheduled Successfully assigned kuttl-test-sensible-owl/auto-provisioned-query-5948967dd4-nc74k to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Warning Pod auto-provisioned-query-5948967dd4-nc74k FailedMount MountVolume.SetUp failed for volume "auto-provisioned-ui-oauth-proxy-tls" : secret "auto-provisioned-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:53 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-5948967dd4 SuccessfulCreate Created pod: auto-provisioned-query-5948967dd4-nc74k replicaset-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:54 +0000 UTC Warning Pod auto-provisioned-query-5948967dd4-nc74k FailedMount MountVolume.SetUp failed for volume "kafkauser-auto-provisioned" : failed to sync secret cache: timed out waiting for the condition kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:54 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k AddedInterface Add eth0 [10.129.2.21/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:54 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:55 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:55 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:55 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:55 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:55 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:55 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:55 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:55 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:57 +0000 UTC Normal Pod auto-provisioned-ingester-6fc6fc6b58-md9hx.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" in 4.367s (4.367s including waiting) kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:57 +0000 UTC Normal Pod auto-provisioned-ingester-6fc6fc6b58-md9hx.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:01:57 +0000 UTC Normal Pod auto-provisioned-ingester-6fc6fc6b58-md9hx.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:03 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx Binding Scheduled Successfully assigned kuttl-test-sensible-owl/auto-provisioned-query-57975bcb44-7x8vx to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:03 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-57975bcb44 SuccessfulCreate Created pod: auto-provisioned-query-57975bcb44-7x8vx replicaset-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:03 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:03 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:03 +0000 UTC Normal Pod auto-provisioned-query-5948967dd4-nc74k.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:03 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-5948967dd4 SuccessfulDelete Deleted pod: auto-provisioned-query-5948967dd4-nc74k replicaset-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:03 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled down replica set auto-provisioned-query-5948967dd4 to 0 from 1 deployment-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:03 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-57975bcb44 to 1 deployment-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx AddedInterface Add eth0 [10.129.2.22/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:04 +0000 UTC Normal Pod auto-provisioned-query-57975bcb44-7x8vx.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Normal Pod check-span-8n68p Binding Scheduled Successfully assigned kuttl-test-sensible-owl/check-span-8n68p to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-8n68p job-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Normal Pod report-span-csnvc Binding Scheduled Successfully assigned kuttl-test-sensible-owl/report-span-csnvc to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:10 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-csnvc job-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:11 +0000 UTC Normal Pod check-span-8n68p AddedInterface Add eth0 [10.129.2.23/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:11 +0000 UTC Normal Pod check-span-8n68p.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:11 +0000 UTC Normal Pod report-span-csnvc AddedInterface Add eth0 [10.131.0.28/23] from ovn-kubernetes
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:11 +0000 UTC Normal Pod report-span-csnvc.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:13 +0000 UTC Normal Pod check-span-8n68p.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" in 2.391s (2.391s including waiting) kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:13 +0000 UTC Normal Pod check-span-8n68p.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:13 +0000 UTC Normal Pod check-span-8n68p.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:13 +0000 UTC Normal Pod report-span-csnvc.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" in 1.973s (1.973s including waiting) kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:13 +0000 UTC Normal Pod report-span-csnvc.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:13 +0000 UTC Normal Pod report-span-csnvc.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | 2023-11-13 08:02:24 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:02:25 | es-streaming-autoprovisioned | Deleting namespace: kuttl-test-sensible-owl
=== CONT kuttl/harness/es-simple-prod
logger.go:42: 08:02:43 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:02:43 | es-simple-prod | Creating namespace: kuttl-test-arriving-foxhound
logger.go:42: 08:02:43 | es-simple-prod | es-simple-prod events from ns kuttl-test-arriving-foxhound:
logger.go:42: 08:02:43 | es-simple-prod | Deleting namespace: kuttl-test-arriving-foxhound
=== CONT kuttl/harness/es-rollover-autoprov
logger.go:42: 08:02:48 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:02:48 | es-rollover-autoprov | Creating namespace: kuttl-test-ultimate-lynx
logger.go:42: 08:02:48 | es-rollover-autoprov/1-install | starting test step 1-install
logger.go:42: 08:02:49 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-ultimate-lynx/my-jaeger created
logger.go:42: 08:03:24 | es-rollover-autoprov/1-install | test step completed 1-install
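The smoke test in the es-streaming-autoprovisioned run above passes once both span jobs finish: report-span feeds a span to the collector and check-span asserts it is queryable. A minimal manual equivalent of that wait, assuming the same job names:

    # Block until both smoke-test jobs report completion (or time out).
    kubectl wait --for=condition=complete job/report-span job/check-span \
      -n $NAMESPACE --timeout=300s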
logger.go:42: 08:03:24 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 08:03:24 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:03:25 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:03:31 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 08:03:32 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 08:03:32 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 08:03:56 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 08:03:56 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices
logger.go:42: 08:03:56 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-ultimate-lynx/00-check-indices created
logger.go:42: 08:03:59 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices
logger.go:42: 08:03:59 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices
logger.go:42: 08:03:59 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-ultimate-lynx/01-check-indices created
logger.go:42: 08:04:03 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices
logger.go:42: 08:04:03 | es-rollover-autoprov/5-install | starting test step 5-install
logger.go:42: 08:04:03 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-ultimate-lynx/my-jaeger updated
logger.go:42: 08:04:15 | es-rollover-autoprov/5-install | test step completed 5-install
logger.go:42: 08:04:15 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans
logger.go:42: 08:04:15 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:04:22 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml]
logger.go:42: 08:04:23 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE]
logger.go:42: 08:04:23 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created
logger.go:42: 08:04:47 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans
logger.go:42: 08:04:47 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices
logger.go:42: 08:04:47 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-ultimate-lynx/02-check-indices created
logger.go:42: 08:04:50 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices
logger.go:42: 08:04:50 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices
logger.go:42: 08:04:50 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-ultimate-lynx/03-check-indices created
logger.go:42: 08:04:53 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices
logger.go:42: 08:04:53 | es-rollover-autoprov/9-check-indices | starting test step 9-check-indices
logger.go:42: 08:04:53 | es-rollover-autoprov/9-check-indices | Job:kuttl-test-ultimate-lynx/04-check-indices created
logger.go:42: 08:04:57 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices
logger.go:42: 08:04:57 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans
logger.go:42: 08:04:57 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:05:05 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml]
logger.go:42: 08:05:06 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE]
logger.go:42: 08:05:06 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created
logger.go:42: 08:05:29 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans
logger.go:42: 08:05:29 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices
logger.go:42: 08:05:29 | es-rollover-autoprov/11-check-indices | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE]
logger.go:42: 08:05:39 | es-rollover-autoprov/11-check-indices | time="2023-11-13T08:05:39Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists"
logger.go:42: 08:05:39 | es-rollover-autoprov/11-check-indices | time="2023-11-13T08:05:39Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 08:05:39 | es-rollover-autoprov/11-check-indices | time="2023-11-13T08:05:39Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully"
logger.go:42: 08:05:39 | es-rollover-autoprov/11-check-indices | time="2023-11-13T08:05:39Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob"
logger.go:42: 08:05:39 | es-rollover-autoprov/11-check-indices | time="2023-11-13T08:05:39Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 08:05:39 | es-rollover-autoprov/11-check-indices | time="2023-11-13T08:05:39Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 08:05:50 | es-rollover-autoprov/11-check-indices | time="2023-11-13T08:05:50Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
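The 11-check-indices step blocks on the wait-cronjob helper until the next job spawned by the my-jaeger-es-rollover CronJob succeeds, so the subsequent index assertions run against post-rollover state. A rough shell equivalent of that wait, assuming rollover jobs are named after the CronJob and that the jsonpath filter below behaves the same on your kubectl version:

    # Poll until some job owned by the rollover CronJob reports success.
    until kubectl get jobs -n $NAMESPACE \
        -o jsonpath='{range .items[?(@.status.succeeded==1)]}{.metadata.name}{"\n"}{end}' \
        | grep -q '^my-jaeger-es-rollover-'; do
      sleep 10
    done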
msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 08:06:00 | es-rollover-autoprov/11-check-indices | time="2023-11-13T08:06:00Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 08:06:09 | es-rollover-autoprov/11-check-indices | time="2023-11-13T08:06:09Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after my-jaeger-es-rollover 30.080466823s" logger.go:42: 08:06:10 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-ultimate-lynx/05-check-indices created logger.go:42: 08:06:14 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices logger.go:42: 08:06:14 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices logger.go:42: 08:06:14 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-ultimate-lynx/06-check-indices created logger.go:42: 08:06:18 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices logger.go:42: 08:06:18 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-ultimate-lynx: logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:55 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc766 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj replicaset-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj AddedInterface Add eth0 [10.128.2.24/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:55 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc766 to 1 deployment-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:56 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:56 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:56 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:56 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:56 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:02:56 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:11 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestultimatelynxmyjaeger-1-5d796dc77gsvj.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-724k6 Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-collector-5489f5bd9b-724k6 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-724k6 AddedInterface Add eth0 [10.131.0.29/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-724k6.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-724k6 replicaset-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-query-7b4b7cb4b6-nfvln to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7b4b7cb4b6 SuccessfulCreate Created pod: my-jaeger-query-7b4b7cb4b6-nfvln replicaset-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:21 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7b4b7cb4b6 to 1 deployment-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-724k6.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-724k6.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal 
Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:22 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:28 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:28 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:28 +0000 UTC Normal Pod my-jaeger-query-7b4b7cb4b6-nfvln.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7b4b7cb4b6 SuccessfulDelete Deleted pod: my-jaeger-query-7b4b7cb4b6-nfvln replicaset-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:28 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-query-8858b855c-6d5qt to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-8858b855c SuccessfulCreate Created pod: my-jaeger-query-8858b855c-6d5qt replicaset-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:28 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-7b4b7cb4b6 to 0 from 1 deployment-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:28 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-8858b855c to 1 deployment-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 
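The Killing/SuccessfulDelete/ScalingReplicaSet sequence above is the query deployment rolling to a new ReplicaSet, apparently after the test patches the Jaeger resource during the 2-report-spans token step (note the last-applied-configuration warning at 08:03:25). A hypothetical way to follow such a rollout to completion:

    # Block until the new query ReplicaSet is fully available.
    kubectl rollout status deployment/my-jaeger-query -n kuttl-test-ultimate-lynx --timeout=120s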
2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt AddedInterface Add eth0 [10.129.2.25/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:29 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:32 +0000 UTC Normal Pod 00-report-span-45kwt Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/00-report-span-45kwt to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:32 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-45kwt job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:33 +0000 UTC Normal Pod 00-report-span-45kwt AddedInterface Add eth0 [10.131.0.30/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:33 +0000 UTC Normal Pod 00-report-span-45kwt.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:33 +0000 UTC Normal Pod 00-report-span-45kwt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:33 +0000 UTC Normal Pod 
00-report-span-45kwt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:39 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:39 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:39 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:55 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:56 +0000 UTC Normal Pod 00-check-indices-9scxg Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/00-check-indices-9scxg to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:56 +0000 UTC Normal Pod 00-check-indices-9scxg AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:56 +0000 UTC Normal Pod 00-check-indices-9scxg.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:56 +0000 UTC Normal Pod 00-check-indices-9scxg.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:56 +0000 UTC Normal Pod 00-check-indices-9scxg.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:56 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-9scxg job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:59 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:59 +0000 UTC Normal Pod 01-check-indices-r9wsm Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/01-check-indices-r9wsm to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:59 +0000 UTC Normal Pod 01-check-indices-r9wsm AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:59 +0000 UTC Normal Pod 01-check-indices-r9wsm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on 
machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:59 +0000 UTC Normal Pod 01-check-indices-r9wsm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:59 +0000 UTC Normal Pod 01-check-indices-r9wsm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:03:59 +0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-r9wsm job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:02 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:04 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-gn5gd Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-es-rollover-create-mapping-gn5gd to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:04 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-gn5gd job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:05 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-gn5gd AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:05 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-gn5gd.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:07 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-gn5gd.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" in 2.879s (2.879s including waiting) kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:08 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-gn5gd.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:08 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-gn5gd.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-5489f5bd9b-724k6 horizontal-pod-autoscaler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed 
to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:10 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:11 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-724k6.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:11 +0000 UTC Warning Pod my-jaeger-collector-5489f5bd9b-724k6.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: Get "http://10.131.0.29:14269/": dial tcp 10.131.0.29:14269: connect: connection refused kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:11 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulDelete Deleted pod: my-jaeger-collector-5489f5bd9b-724k6 replicaset-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:11 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-5489f5bd9b to 0 from 1 deployment-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:11 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:11 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:11 +0000 UTC Normal Pod my-jaeger-query-8858b855c-6d5qt.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:11 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-8858b855c SuccessfulDelete Deleted pod: my-jaeger-query-8858b855c-6d5qt replicaset-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:11 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-8858b855c to 0 from 1 deployment-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:12 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-swfjk Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-collector-7794fb6d5c-swfjk to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:12 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7794fb6d5c SuccessfulCreate Created pod: my-jaeger-collector-7794fb6d5c-swfjk replicaset-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:12 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-7794fb6d5c to 1 deployment-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:12 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-query-589476cc64-jdn6l to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:12 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-589476cc64 SuccessfulCreate Created pod: my-jaeger-query-589476cc64-jdn6l replicaset-controller logger.go:42: 
08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:12 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-589476cc64 to 1 deployment-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-swfjk AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-swfjk.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-swfjk.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-swfjk.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:13 +0000 UTC Normal Pod my-jaeger-query-589476cc64-jdn6l.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:06:18 | es-rollover-autoprov 
| 2023-11-13 08:04:23 +0000 UTC Normal Pod 02-report-span-d6nqh Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/02-report-span-d6nqh to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:23 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-d6nqh job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:24 +0000 UTC Normal Pod 02-report-span-d6nqh AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:24 +0000 UTC Normal Pod 02-report-span-d6nqh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:24 +0000 UTC Normal Pod 02-report-span-d6nqh.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:24 +0000 UTC Normal Pod 02-report-span-d6nqh.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:46 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:47 +0000 UTC Normal Pod 02-check-indices-78mwh Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/02-check-indices-78mwh to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:47 +0000 UTC Normal Pod 02-check-indices-78mwh AddedInterface Add eth0 [10.131.0.36/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:47 +0000 UTC Normal Pod 02-check-indices-78mwh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:47 +0000 UTC Normal Pod 02-check-indices-78mwh.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:47 +0000 UTC Normal Pod 02-check-indices-78mwh.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:47 +0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-78mwh job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:50 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:50 +0000 UTC Normal Pod 03-check-indices-wwmvf Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/03-check-indices-wwmvf to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:50 +0000 UTC Normal Pod 03-check-indices-wwmvf AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:50 +0000 UTC Normal Pod 03-check-indices-wwmvf.spec.containers{asserts-container} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:50 +0000 UTC Normal Pod 03-check-indices-wwmvf.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:50 +0000 UTC Normal Pod 03-check-indices-wwmvf.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:50 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-wwmvf job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:53 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:53 +0000 UTC Normal Pod 04-check-indices-glwlm Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/04-check-indices-glwlm to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:53 +0000 UTC Normal Pod 04-check-indices-glwlm AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:53 +0000 UTC Normal Pod 04-check-indices-glwlm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:53 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-glwlm job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:54 +0000 UTC Normal Pod 04-check-indices-glwlm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:54 +0000 UTC Normal Pod 04-check-indices-glwlm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:04:57 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28331045-28h2q Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-es-lookback-28331045-28h2q to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28331045-28h2q AddedInterface Add eth0 [10.131.0.40/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28331045-28h2q.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28331045-28h2q.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod 
my-jaeger-es-lookback-28331045-28h2q.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28331045 SuccessfulCreate Created pod: my-jaeger-es-lookback-28331045-28h2q job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28331045 cronjob-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28331045-qvgpq Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-es-rollover-28331045-qvgpq to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28331045-qvgpq AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28331045-qvgpq.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28331045-qvgpq.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28331045-qvgpq.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28331045 SuccessfulCreate Created pod: my-jaeger-es-rollover-28331045-qvgpq job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28331045 cronjob-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28331045 Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28331045, status: Complete cronjob-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28331045 Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28331045, status: Complete cronjob-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:06 +0000 UTC Normal Pod 03-report-span-29rfw Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/03-report-span-29rfw to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:06 +0000 UTC Normal Pod 03-report-span-29rfw AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:06 +0000 UTC Normal Pod 
03-report-span-29rfw.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:06 +0000 UTC Normal Pod 03-report-span-29rfw.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:06 +0000 UTC Normal Pod 03-report-span-29rfw.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:06 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-29rfw job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-7794fb6d5c-swfjk horizontal-pod-autoscaler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:05:29 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28331046-kgkt6 Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-es-lookback-28331046-kgkt6 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28331046-kgkt6 AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28331046-kgkt6.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28331046-kgkt6.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28331046-kgkt6.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28331046 SuccessfulCreate Created pod: my-jaeger-es-lookback-28331046-kgkt6 job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28331046 cronjob-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28331046-j75lv Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/my-jaeger-es-rollover-28331046-j75lv to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28331046-j75lv AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod 
my-jaeger-es-rollover-28331046-j75lv.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28331046-j75lv.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28331046-j75lv.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28331046 SuccessfulCreate Created pod: my-jaeger-es-rollover-28331046-j75lv job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28331046 cronjob-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28331046 Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28331046, status: Complete cronjob-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28331046 Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28331046, status: Complete cronjob-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:10 +0000 UTC Normal Pod 05-check-indices-cx9t7 Binding Scheduled Successfully assigned kuttl-test-ultimate-lynx/05-check-indices-cx9t7 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:10 +0000 UTC Normal Pod 05-check-indices-cx9t7 AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:10 +0000 UTC Normal Pod 05-check-indices-cx9t7.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:10 +0000 UTC Normal Pod 05-check-indices-cx9t7.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:10 +0000 UTC Normal Pod 05-check-indices-cx9t7.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:10 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-cx9t7 job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:13 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:14 +0000 UTC Normal Pod 06-check-indices-mf4js Binding Scheduled Successfully assigned 
kuttl-test-ultimate-lynx/06-check-indices-mf4js to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:14 +0000 UTC Normal Pod 06-check-indices-mf4js AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:14 +0000 UTC Normal Pod 06-check-indices-mf4js.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:14 +0000 UTC Normal Pod 06-check-indices-mf4js.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:14 +0000 UTC Normal Pod 06-check-indices-mf4js.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:14 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-mf4js job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | 2023-11-13 08:06:17 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller logger.go:42: 08:06:18 | es-rollover-autoprov | Deleting namespace: kuttl-test-ultimate-lynx === CONT kuttl/harness/es-increasing-replicas logger.go:42: 08:06:25 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:06:25 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:06:25 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:06:25 | es-increasing-replicas | Creating namespace: kuttl-test-aware-crab logger.go:42: 08:06:25 | es-increasing-replicas/1-install | starting test step 1-install logger.go:42: 08:06:25 | es-increasing-replicas/1-install | Jaeger:kuttl-test-aware-crab/simple-prod created logger.go:42: 08:07:01 | es-increasing-replicas/1-install | test step completed 1-install logger.go:42: 08:07:01 | es-increasing-replicas/2-install | starting test step 2-install logger.go:42: 08:07:01 | es-increasing-replicas/2-install | Jaeger:kuttl-test-aware-crab/simple-prod updated logger.go:42: 08:07:11 | es-increasing-replicas/2-install | test step completed 2-install logger.go:42: 08:07:11 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test logger.go:42: 08:07:11 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 08:07:12 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
logger.go:42: 08:07:19 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:07:20 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:07:20 | es-increasing-replicas/3-smoke-test | job.batch/report-span created logger.go:42: 08:07:20 | es-increasing-replicas/3-smoke-test | job.batch/check-span created logger.go:42: 08:07:31 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test logger.go:42: 08:07:31 | es-increasing-replicas/4-install | starting test step 4-install logger.go:42: 08:07:31 | es-increasing-replicas/4-install | Jaeger:kuttl-test-aware-crab/simple-prod updated logger.go:42: 08:07:31 | es-increasing-replicas/4-install | test step completed 4-install logger.go:42: 08:07:31 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes logger.go:42: 08:07:31 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE] logger.go:42: 08:07:31 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected logger.go:42: 08:07:32 | es-increasing-replicas/5-check-es-nodes | false logger.go:42: 08:07:32 | es-increasing-replicas/5-check-es-nodes | Error: no matches found logger.go:42: 08:07:37 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected logger.go:42: 08:07:37 | es-increasing-replicas/5-check-es-nodes | true logger.go:42: 08:07:37 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes logger.go:42: 08:07:37 | es-increasing-replicas | es-increasing-replicas events from ns kuttl-test-aware-crab: logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:32 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc68 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm replicaset-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm Binding Scheduled Successfully assigned kuttl-test-aware-crab/elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:32 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestawarecrabsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc68 to 1 deployment-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm AddedInterface Add eth0 [10.128.2.25/23] from ovn-kubernetes logger.go:42: 08:07:37 | 
es-increasing-replicas | 2023-11-13 08:06:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:43 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:48 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-1-7786ddbc6rqgwm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-2hw68 Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-collector-77fcbdc546-2hw68 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-2hw68 AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-2hw68.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-2hw68.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-2hw68.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal ReplicaSet.apps 
simple-prod-collector-77fcbdc546 SuccessfulCreate Created pod: simple-prod-collector-77fcbdc546-2hw68 replicaset-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-77fcbdc546 to 1 deployment-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25 Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-query-868484bb49-flg25 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25 AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal ReplicaSet.apps simple-prod-query-868484bb49 SuccessfulCreate Created pod: simple-prod-query-868484bb49-flg25 replicaset-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:06:59 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-868484bb49 to 1 deployment-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC 
Normal Pod simple-prod-collector-77fcbdc546-s2x76 Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-collector-77fcbdc546-s2x76 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-s2x76 AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-s2x76.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-s2x76.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-s2x76.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulCreate Created pod: simple-prod-collector-77fcbdc546-s2x76 replicaset-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-77fcbdc546 to 2 from 1 deployment-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-query-868484bb49-6d29t to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal ReplicaSet.apps simple-prod-query-868484bb49 SuccessfulCreate Created pod: simple-prod-query-868484bb49-6d29t replicaset-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:03 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-868484bb49 to 2 from 1 deployment-controller logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:06 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" in 2.364s (2.364s including waiting) kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:06 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:06 +0000 UTC Normal Pod 
simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:06 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:06 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:06 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:06 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:08 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" in 1.963s (1.963s including waiting) kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:08 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:08 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:14 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:14 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:14 +0000 UTC Normal Pod simple-prod-query-868484bb49-6d29t.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:14 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:14 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:14 +0000 UTC Normal Pod simple-prod-query-868484bb49-flg25.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-868484bb49 SuccessfulDelete Deleted pod: simple-prod-query-868484bb49-6d29t replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-868484bb49 SuccessfulDelete Deleted pod: simple-prod-query-868484bb49-flg25 replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:14 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-868484bb49 to 0 from 2 deployment-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-query-74bb8dcf78-fs2gw to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7 Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-query-74bb8dcf78-lbhs7 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7 AddedInterface Add eth0 [10.129.2.29/23] from ovn-kubernetes
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal ReplicaSet.apps simple-prod-query-74bb8dcf78 SuccessfulCreate Created pod: simple-prod-query-74bb8dcf78-lbhs7 replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal ReplicaSet.apps simple-prod-query-74bb8dcf78 SuccessfulCreate Created pod: simple-prod-query-74bb8dcf78-fs2gw replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:15 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-74bb8dcf78 to 2 deployment-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Warning Pod simple-prod-query-74bb8dcf78-fs2gw FailedToRetrieveImagePullSecret Unable to retrieve some image pull secrets (simple-prod-ui-proxy-dockercfg-twlr4); attempting to pull the image may not succeed. kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:16 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:17 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:17 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:17 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:17 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:17 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:20 +0000 UTC Normal Pod check-span-xmfrl Binding Scheduled Successfully assigned kuttl-test-aware-crab/check-span-xmfrl to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:20 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-xmfrl job-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:20 +0000 UTC Normal Pod report-span-npkqd Binding Scheduled Successfully assigned kuttl-test-aware-crab/report-span-npkqd to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:20 +0000 UTC Normal Pod report-span-npkqd AddedInterface Add eth0 [10.131.0.49/23] from ovn-kubernetes
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:20 +0000 UTC Normal Pod report-span-npkqd.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:20 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-npkqd job-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:21 +0000 UTC Normal Pod check-span-xmfrl AddedInterface Add eth0 [10.129.2.30/23] from ovn-kubernetes
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:21 +0000 UTC Normal Pod check-span-xmfrl.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:21 +0000 UTC Normal Pod check-span-xmfrl.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:21 +0000 UTC Normal Pod check-span-xmfrl.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:21 +0000 UTC Normal Pod report-span-npkqd.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:21 +0000 UTC Normal Pod report-span-npkqd.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:31 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k Binding Scheduled Successfully assigned kuttl-test-aware-crab/elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb24j9k.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestawarecrabsimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestawarecrabsimpleprod-2-6565cf6cb to 1 deployment-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-2hw68.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-s2x76.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulDelete Deleted pod: simple-prod-collector-77fcbdc546-s2x76 replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulDelete Deleted pod: simple-prod-collector-77fcbdc546-2hw68 replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Warning Endpoints simple-prod-collector-headless FailedToUpdateEndpoint Failed to update endpoint kuttl-test-aware-crab/simple-prod-collector-headless: Operation cannot be fulfilled on endpoints "simple-prod-collector-headless": the object has been modified; please apply your changes to the latest version and try again endpoint-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-77fcbdc546 to 0 from 2 deployment-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-fs2gw.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Pod simple-prod-query-74bb8dcf78-lbhs7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal ReplicaSet.apps simple-prod-query-74bb8dcf78 SuccessfulDelete Deleted pod: simple-prod-query-74bb8dcf78-fs2gw replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal ReplicaSet.apps simple-prod-query-74bb8dcf78 SuccessfulDelete Deleted pod: simple-prod-query-74bb8dcf78-lbhs7 replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:33 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-74bb8dcf78 to 0 from 2 deployment-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-8kghs Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-collector-5db88495b5-8kghs to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-r4cxq Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-collector-5db88495b5-r4cxq to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-r4cxq AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-r4cxq.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5db88495b5 SuccessfulCreate Created pod: simple-prod-collector-5db88495b5-r4cxq replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5db88495b5 SuccessfulCreate Created pod: simple-prod-collector-5db88495b5-8kghs replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5db88495b5 to 2 deployment-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-query-694cbd7449-7rznn to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v Binding Scheduled Successfully assigned kuttl-test-aware-crab/simple-prod-query-694cbd7449-tjv6v to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v AddedInterface Add eth0 [10.129.2.32/23] from ovn-kubernetes
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal ReplicaSet.apps simple-prod-query-694cbd7449 SuccessfulCreate Created pod: simple-prod-query-694cbd7449-tjv6v replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal ReplicaSet.apps simple-prod-query-694cbd7449 SuccessfulCreate Created pod: simple-prod-query-694cbd7449-7rznn replicaset-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:34 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-694cbd7449 to 2 deployment-controller
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-8kghs AddedInterface Add eth0 [10.131.0.51/23] from ovn-kubernetes
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-8kghs.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-8kghs.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-8kghs.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-r4cxq.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-r4cxq.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn AddedInterface Add eth0 [10.131.0.52/23] from ovn-kubernetes
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-7rznn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | 2023-11-13 08:07:35 +0000 UTC Normal Pod simple-prod-query-694cbd7449-tjv6v.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:07:37 | es-increasing-replicas | Deleting namespace: kuttl-test-aware-crab
=== CONT kuttl/harness/es-index-cleaner-autoprov
logger.go:42: 08:08:12 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:08:12 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-crack-fox
logger.go:42: 08:08:12 | es-index-cleaner-autoprov/1-install | starting test step 1-install
logger.go:42: 08:08:12 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-crack-fox/test-es-index-cleaner-with-prefix created
logger.go:42: 08:08:49 | es-index-cleaner-autoprov/1-install | test step completed 1-install
logger.go:42: 08:08:49 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 08:08:49 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE test-es-index-cleaner-with-prefix /dev/null]
logger.go:42: 08:08:51 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:08:58 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 08:08:59 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 08:08:59 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 08:09:38 | es-index-cleaner-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 08:09:38 | es-index-cleaner-autoprov/3-install | starting test step 3-install
logger.go:42: 08:09:38 | es-index-cleaner-autoprov/3-install | Jaeger:kuttl-test-crack-fox/test-es-index-cleaner-with-prefix updated
logger.go:42: 08:09:38 | es-index-cleaner-autoprov/3-install | test step completed 3-install
logger.go:42: 08:09:38 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | starting test step 4-wait-es-index-cleaner
logger.go:42: 08:09:38 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob test-es-index-cleaner-with-prefix-es-index-cleaner --namespace $NAMESPACE]
logger.go:42: 08:09:39 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-13T08:09:39Z" level=debug msg="Checking if the test-es-index-cleaner-with-prefix-es-index-cleaner CronJob exists"
logger.go:42: 08:09:39 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-13T08:09:39Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 08:09:39 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-13T08:09:39Z" level=info msg="Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner found successfully"
logger.go:42: 08:09:39 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-13T08:09:39Z" level=debug msg="Waiting for the next scheduled job from test-es-index-cleaner-with-prefix-es-index-cleaner cronjob"
logger.go:42: 08:09:39 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-13T08:09:39Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 08:09:39 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-13T08:09:39Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 08:09:49 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-13T08:09:49Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 08:09:59 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-13T08:09:59Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 08:10:09 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-13T08:10:09Z" level=info msg="Job of owner test-es-index-cleaner-with-prefix-es-index-cleaner succeeded after test-es-index-cleaner-with-prefix-es-index-cleaner 30.060915278s"
logger.go:42: 08:10:09 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | test step completed 4-wait-es-index-cleaner
logger.go:42: 08:10:09 | es-index-cleaner-autoprov/5-install | starting test step 5-install
logger.go:42: 08:10:10 | es-index-cleaner-autoprov/5-install | Jaeger:kuttl-test-crack-fox/test-es-index-cleaner-with-prefix updated
logger.go:42: 08:10:10 | es-index-cleaner-autoprov/5-install | test step completed 5-install
logger.go:42: 08:10:10 | es-index-cleaner-autoprov/6-check-indices | starting test step 6-check-indices
logger.go:42: 08:10:10 | es-index-cleaner-autoprov/6-check-indices | Job:kuttl-test-crack-fox/00-check-indices created
logger.go:42: 08:10:13 | es-index-cleaner-autoprov/6-check-indices | test step completed 6-check-indices
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-crack-fox:
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:19 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7c8959968d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k replicaset-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k Binding Scheduled Successfully assigned kuttl-test-crack-fox/elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:19 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7c8959968d to 1 deployment-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k AddedInterface Add eth0 [10.128.2.26/23] from ovn-kubernetes
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:30 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:35 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcrackfoxtestesindexcleaner-1-7czln9k.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-ll99t Binding Scheduled Successfully assigned kuttl-test-crack-fox/test-es-index-cleaner-with-prefix-collector-7f88446db8-ll99t to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-ll99t AddedInterface Add eth0 [10.131.0.53/23] from ovn-kubernetes
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-ll99t.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-ll99t.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-ll99t.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-7f88446db8 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-7f88446db8-ll99t replicaset-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-7f88446db8 to 1 deployment-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7 Binding Scheduled Successfully assigned kuttl-test-crack-fox/test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-8548ffdf9 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7 replicaset-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:46 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-8548ffdf9 to 1 deployment-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7 AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:55 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:55 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:55 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:55 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-8548ffdf9 SuccessfulDelete Deleted pod: test-es-index-cleaner-with-prefix-query-8548ffdf9-fvdh7 replicaset-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:55 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled down replica set test-es-index-cleaner-with-prefix-query-8548ffdf9 to 0 from 1 deployment-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:56 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4 Binding Scheduled Successfully assigned kuttl-test-crack-fox/test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:56 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-6c9b5fd649 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4 replicaset-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:56 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-6c9b5fd649 to 1 deployment-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4 AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:57 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-6c9b5fd649-chqb4.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:59 +0000 UTC Normal Pod 00-report-span-5cmvb Binding Scheduled Successfully assigned kuttl-test-crack-fox/00-report-span-5cmvb to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:59 +0000 UTC Normal Pod 00-report-span-5cmvb AddedInterface Add eth0 [10.131.0.54/23] from ovn-kubernetes
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:59 +0000 UTC Normal Pod 00-report-span-5cmvb.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:08:59 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-5cmvb job-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:09:00 +0000 UTC Normal Pod 00-report-span-5cmvb.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:09:00 +0000 UTC Normal Pod 00-report-span-5cmvb.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:09:04 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:09:04 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:09:04 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:09:34 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:09:34 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:09:37 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:09:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-ll99t horizontal-pod-autoscaler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28331050 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-2833105h9htq job-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2833105h9htq Binding Scheduled Successfully assigned kuttl-test-crack-fox/test-es-index-cleaner-with-prefix-es-index-cleaner-2833105h9htq to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2833105h9htq AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2833105h9htq.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:08ca2463363916637592e6c1cc1731784e07860269292b216db3e6fd0eb44382" kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-28331050 cronjob-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2833105h9htq.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:08ca2463363916637592e6c1cc1731784e07860269292b216db3e6fd0eb44382" in 3.957s (3.957s including waiting) kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2833105h9htq.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2833105h9htq.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:06 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28331050 Completed Job completed job-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:06 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-28331050, status: Complete cronjob-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:10 +0000 UTC Normal Pod 00-check-indices-lbvlm Binding Scheduled Successfully assigned kuttl-test-crack-fox/00-check-indices-lbvlm to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:10 +0000 UTC Normal Pod 00-check-indices-lbvlm AddedInterface Add eth0 [10.131.0.56/23] from ovn-kubernetes
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:10 +0000 UTC Normal Pod 00-check-indices-lbvlm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:10 +0000 UTC Normal Pod 00-check-indices-lbvlm.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:10 +0000 UTC Normal Pod 00-check-indices-lbvlm.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:10 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-lbvlm job-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | 2023-11-13 08:10:12 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller
logger.go:42: 08:10:13 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-crack-fox
=== CONT kuttl/harness/es-from-aio-to-production
logger.go:42: 08:10:19 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:10:19 | es-from-aio-to-production | Creating namespace: kuttl-test-living-mosquito
logger.go:42: 08:10:19 | es-from-aio-to-production/0-install | starting test step 0-install
logger.go:42: 08:10:19 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-living-mosquito/my-jaeger created
logger.go:42: 08:10:29 | es-from-aio-to-production/0-install | test step completed 0-install
logger.go:42: 08:10:29 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:10:29 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:10:30 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:10:36 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:10:37 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:10:37 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created
logger.go:42: 08:10:37 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created
logger.go:42: 08:10:49 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:10:49 | es-from-aio-to-production/3-install | starting test step 3-install
logger.go:42: 08:10:49 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-living-mosquito/my-jaeger updated
logger.go:42: 08:11:22 | es-from-aio-to-production/3-install | test step completed 3-install
logger.go:42: 08:11:22 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test
logger.go:42: 08:11:22 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:11:29 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:11:30 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:11:30 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged
logger.go:42: 08:11:30 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged
logger.go:42: 08:11:30 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test
logger.go:42: 08:11:30 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-living-mosquito:
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:23 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv Binding Scheduled Successfully assigned kuttl-test-living-mosquito/my-jaeger-6dc756d7fb-bwpbv to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:23 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv AddedInterface Add eth0 [10.131.0.57/23] from ovn-kubernetes
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:23 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:23 +0000 UTC Normal ReplicaSet.apps my-jaeger-6dc756d7fb SuccessfulCreate Created pod: my-jaeger-6dc756d7fb-bwpbv replicaset-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:23 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6dc756d7fb to 1 deployment-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:26 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" in 3.148s (3.148s including waiting) kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:26 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:26 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:26 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:27 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:27 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:32 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:32 +0000 UTC Normal Pod my-jaeger-6dc756d7fb-bwpbv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:32 +0000 UTC Normal ReplicaSet.apps my-jaeger-6dc756d7fb SuccessfulDelete Deleted pod: my-jaeger-6dc756d7fb-bwpbv replicaset-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:32 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-6dc756d7fb to 0 from 1 deployment-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:33 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd Binding Scheduled Successfully assigned kuttl-test-living-mosquito/my-jaeger-54f79544f8-g2gzd to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:33 +0000 UTC Normal ReplicaSet.apps my-jaeger-54f79544f8 SuccessfulCreate Created pod: my-jaeger-54f79544f8-g2gzd replicaset-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:33 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-54f79544f8 to 1 deployment-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:34 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd AddedInterface Add eth0 [10.131.0.58/23] from ovn-kubernetes
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:34 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:34 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:34 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:34 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:34 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:34 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:37 +0000 UTC Normal Pod check-span-v6fqh Binding Scheduled Successfully assigned kuttl-test-living-mosquito/check-span-v6fqh to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:37 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-v6fqh job-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:37 +0000 UTC Normal Pod report-span-57qc9 Binding Scheduled Successfully assigned kuttl-test-living-mosquito/report-span-57qc9 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:37 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-57qc9 job-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:38 +0000 UTC Normal Pod check-span-v6fqh AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:38 +0000 UTC Normal Pod check-span-v6fqh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:38 +0000 UTC Normal Pod check-span-v6fqh.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:38 +0000 UTC Normal Pod check-span-v6fqh.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:38 +0000 UTC Normal Pod report-span-57qc9 AddedInterface Add eth0 [10.129.2.35/23] from ovn-kubernetes
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:38 +0000 UTC Normal Pod report-span-57qc9.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:38 +0000 UTC Normal Pod report-span-57qc9.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:38 +0000 UTC Normal Pod report-span-57qc9.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:48 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw Binding Scheduled Successfully assigned kuttl-test-living-mosquito/elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw AddedInterface Add eth0 [10.128.2.27/23] from ovn-kubernetes
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f8986b SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw replicaset-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:10:53 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f8986b to 1 deployment-controller
logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:03 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch
node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:08 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestlivingmosquitomyjaeger-1-7476f84z8kw.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:10 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:18 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-btmmk Binding Scheduled Successfully assigned kuttl-test-living-mosquito/my-jaeger-collector-5489f5bd9b-btmmk to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:18 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-btmmk replicaset-controller logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:18 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:18 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7459f55c7c to 1 deployment-controller logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-54f79544f8-g2gzd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-btmmk AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-btmmk.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-btmmk.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-btmmk.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw Binding Scheduled Successfully assigned kuttl-test-living-mosquito/my-jaeger-query-7459f55c7c-n75xw to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" 
already present on machine kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal Pod my-jaeger-query-7459f55c7c-n75xw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:11:30 | es-from-aio-to-production | 2023-11-13 08:11:19 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7459f55c7c SuccessfulCreate Created pod: my-jaeger-query-7459f55c7c-n75xw replicaset-controller logger.go:42: 08:11:30 | es-from-aio-to-production | Deleting namespace: kuttl-test-living-mosquito === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (860.87s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.92s) --- PASS: kuttl/harness/es-multiinstance (106.78s) --- PASS: kuttl/harness/es-streaming-autoprovisioned (213.93s) --- PASS: kuttl/harness/es-simple-prod (5.81s) --- PASS: kuttl/harness/es-rollover-autoprov (216.57s) --- PASS: kuttl/harness/es-increasing-replicas (106.86s) --- PASS: kuttl/harness/es-index-cleaner-autoprov (127.34s) --- PASS: kuttl/harness/es-from-aio-to-production (77.51s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml time="2023-11-13T08:11:40Z" level=debug msg="Setting a new name for the test suites" time="2023-11-13T08:11:40Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-11-13T08:11:40Z" level=debug msg="normalizing test case names" time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts" time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance" time="2023-11-13T08:11:40Z" 
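The smoke-test steps above follow one pattern: render a Job pair (report-span, check-span) from a gomplate template, then apply it into the test namespace. A minimal sketch of that flow for reproducing it outside kuttl, using only the commands and endpoint values visible in this run's log (NAMESPACE and the my-jaeger endpoints are this run's values; adjust them for your own Jaeger instance):

  #!/usr/bin/env bash
  # Sketch of the traced smoke-test step. ASSERT_IMG (the digest-pinned CI
  # assert image) is also exported in the real run; omitted here.
  set -euo pipefail
  export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
  export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
  export MOUNT_SECRET=e2e-test   # token secret used by the secured (OpenShift) variant

  # Render the report-span/check-span Job manifest from the template ...
  /tmp/jaeger-tests/bin/gomplate \
    -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template \
    -o smoke-test-job.yaml

  # ... and apply it; kuttl then waits for both Jobs to reach Completed.
  kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"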
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (860.87s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.92s)
--- PASS: kuttl/harness/es-multiinstance (106.78s)
--- PASS: kuttl/harness/es-streaming-autoprovisioned (213.93s)
--- PASS: kuttl/harness/es-simple-prod (5.81s)
--- PASS: kuttl/harness/es-rollover-autoprov (216.57s)
--- PASS: kuttl/harness/es-increasing-replicas (106.86s)
--- PASS: kuttl/harness/es-index-cleaner-autoprov (127.34s)
--- PASS: kuttl/harness/es-from-aio-to-production (77.51s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml
time="2023-11-13T08:11:40Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-13T08:11:40Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-13T08:11:40Z" level=debug msg="normalizing test case names"
time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts"
time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance"
time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/es-streaming-autoprovisioned -> elasticsearch_es_streaming_autoprovisioned"
time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod"
time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov"
time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas"
time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov"
time="2023-11-13T08:11:40Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+----------------------------------------------+--------+
|                     NAME                     | RESULT |
+----------------------------------------------+--------+
| elasticsearch_artifacts                      | passed |
| elasticsearch_es_multiinstance               | passed |
| elasticsearch_es_streaming_autoprovisioned   | passed |
| elasticsearch_es_simple_prod                 | passed |
| elasticsearch_es_rollover_autoprov           | passed |
| elasticsearch_es_increasing_replicas         | passed |
| elasticsearch_es_index_cleaner_autoprov      | passed |
| elasticsearch_es_from_aio_to_production      | passed |
+----------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ '[' 0 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
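The suite ends with a small gate: it counts occurrences of "failure message" in the generated JUnit XML and decides the exit code from that count. A reconstruction from the trace above (the loop shape and the meaning of the 3 threshold beyond what the trace literally shows are assumptions):

  # Assumed reconstruction of the failure gate traced above: tally JUnit
  # failures across the artifact XMLs and only fail the suite past a threshold.
  count=0
  for file in "$ARTIFACT_DIR"/*; do
    failures=$(grep -c 'failure message' "$file" || true)
    if [ "$failures" -gt 0 ]; then
      count=$((count + failures))
    fi
  done
  # In this run both checks were false (0 failures), hence 'exit 0'.
  if [ "$count" -gt 3 ]; then
    exit 1
  fi
  exit 0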
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=examples
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/examples.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-examples
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
KAFKA_VERSION=0.32.0 \
SKIP_KAFKA=false \
VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \
./tests/e2e/examples/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-11-08-062604 True False 23m Cluster version is 4.15.0-0.nightly-2023-11-08-062604'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-11-08-062604 True False 23m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 0.32.0 ']'
++ version_le 0.32.0 0.25.0
+++ echo 0.32.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 0.32.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/examples/render.sh
++ export SUITE_DIR=./tests/e2e/examples
++ SUITE_DIR=./tests/e2e/examples
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
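The version_le trace above (echo, tr, sort -V, head) shows how the script compares Kafka operator versions. Reconstructed from that trace, the helper is true when its first argument sorts at or below the second under version ordering:

  # Reconstruction of version_le from the trace: A <= B under 'sort -V'.
  version_le() {
    test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
  }

  # As traced: version_le 0.32.0 0.25.0 is false (0.25.0 sorts first),
  # so the script sets KAFKA_USE_CUSTOM_PODSET=true for this Kafka version.
  if version_le "$KAFKA_VERSION" 0.25.0; then
    KAFKA_USE_CUSTOM_PODSET=false
  else
    KAFKA_USE_CUSTOM_PODSET=true
  fi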
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/examples
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ start_test examples-agent-as-daemonset
+ '[' 1 -ne 1 ']'
+ test_name=examples-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-agent-as-daemonset'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m'
Rendering files for test examples-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build
+ '[' _build '!=' _build ']'
+ mkdir -p examples-agent-as-daemonset
+ cd examples-agent-as-daemonset
+ example_name=agent-as-daemonset
+ prepare_daemonset 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ '[' true = true ']'
+ cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml
+ echo ---
+ cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml
+ render_install_example agent-as-daemonset 01
+ '[' 2 -ne 2 ']'
+ example_name=agent-as-daemonset
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-as-daemonset.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=agent-as-daemonset
++ '[' -z agent-as-daemonset ']'
++ echo agent-as-daemonset
++ return 0
+ JAEGER_NAME=agent-as-daemonset
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=DaemonSet
++ '[' DaemonSet = null ']'
++ echo DaemonSet
++ return 0
+ jaeger_strategy=DaemonSet
+ '[' DaemonSet = DaemonSet ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
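The get_jaeger_strategy traces repeated throughout this render pass all follow the same decision: prefer the explicit deployment strategy, fall back to the agent strategy, and default to allInOne. A reconstruction from those traces (the function body is inferred; only the yq expressions and comparisons appear verbatim in the log):

  # Assumed reconstruction of get_jaeger_strategy from the traces in this log.
  get_jaeger_strategy() {
    local deployment_file=$1
    local strategy
    strategy=$(/tmp/jaeger-tests/bin/yq e '.
  | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
    if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
      echo "$strategy"     # explicit strategy wins
      return 0
    fi
    strategy=$(/tmp/jaeger-tests/bin/yq e '.
  | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
    if [ "$strategy" != null ]; then
      echo "$strategy"     # e.g. DaemonSet, as for agent-as-daemonset above
    else
      echo allInOne        # default when neither field is set
    fi
    return 0
  }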
+ render_smoke_test_example agent-as-daemonset 02
+ '[' 2 -ne 2 ']'
+ example_name=agent-as-daemonset
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/agent-as-daemonset.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-as-daemonset.yaml
++ jaeger_name=agent-as-daemonset
++ '[' -z agent-as-daemonset ']'
++ echo agent-as-daemonset
++ return 0
+ jaeger_name=agent-as-daemonset
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test agent-as-daemonset true 02
+ '[' 3 -ne 3 ']'
+ jaeger=agent-as-daemonset
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443
+ JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268
+ export JAEGER_NAME=agent-as-daemonset
+ JAEGER_NAME=agent-as-daemonset
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
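Every render_smoke_test call in this log takes the secured branch: on OpenShift the query endpoint goes through the OAuth proxy on 443 and the OpenShift template variant is used. A reconstruction from the traces (the unsecured defaults, http:// and :16686, are assumptions not exercised in this run):

  # Assumed reconstruction of render_smoke_test from the traces above.
  render_smoke_test() {
    local jaeger=$1 is_secured=$2 test_step=$3
    local protocol=http:// query_port=:16686   # assumed unsecured defaults
    local template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
    if [ "$is_secured" = true ]; then
      protocol=https://
      query_port=:443
      template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
    fi
    export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
    export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
    export JAEGER_NAME=$jaeger
    /tmp/jaeger-tests/bin/gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
    /tmp/jaeger-tests/bin/gomplate \
      -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template \
      -o "./${test_step}-assert.yaml"
    unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
  }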
+ start_test examples-agent-with-priority-class
+ '[' 1 -ne 1 ']'
+ test_name=examples-agent-with-priority-class
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-agent-with-priority-class'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m'
Rendering files for test examples-agent-with-priority-class
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset
+ '[' examples-agent-as-daemonset '!=' _build ']'
+ cd ..
+ mkdir -p examples-agent-with-priority-class
+ cd examples-agent-with-priority-class
+ example_name=agent-with-priority-class
+ prepare_daemonset 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ '[' true = true ']'
+ cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml
+ echo ---
+ cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml
+ render_install_example agent-with-priority-class 01
+ '[' 2 -ne 2 ']'
+ example_name=agent-with-priority-class
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=agent-as-daemonset
++ '[' -z agent-as-daemonset ']'
++ echo agent-as-daemonset
++ return 0
+ JAEGER_NAME=agent-as-daemonset
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=DaemonSet
++ '[' DaemonSet = null ']'
++ echo DaemonSet
++ return 0
+ jaeger_strategy=DaemonSet
+ '[' DaemonSet = DaemonSet ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_smoke_test_example agent-with-priority-class 02
+ '[' 2 -ne 2 ']'
+ example_name=agent-with-priority-class
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml
++ jaeger_name=agent-as-daemonset
++ '[' -z agent-as-daemonset ']'
++ echo agent-as-daemonset
++ return 0
+ jaeger_name=agent-as-daemonset
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test agent-as-daemonset true 02
+ '[' 3 -ne 3 ']'
+ jaeger=agent-as-daemonset
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443
+ JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268
+ export JAEGER_NAME=agent-as-daemonset
+ JAEGER_NAME=agent-as-daemonset
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-all-in-one-with-options
+ '[' 1 -ne 1 ']'
+ test_name=examples-all-in-one-with-options
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-all-in-one-with-options'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m'
Rendering files for test examples-all-in-one-with-options
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class
+ '[' examples-agent-with-priority-class '!=' _build ']'
+ cd ..
+ mkdir -p examples-all-in-one-with-options
+ cd examples-all-in-one-with-options
+ example_name=all-in-one-with-options
+ render_install_example all-in-one-with-options 00
+ '[' 2 -ne 2 ']'
+ example_name=all-in-one-with-options
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=my-jaeger
++ '[' -z my-jaeger ']'
++ echo my-jaeger
++ return 0
+ JAEGER_NAME=my-jaeger
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=allInOne
++ '[' allInOne = production ']'
++ '[' allInOne = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml
+ render_smoke_test_example all-in-one-with-options 01
+ '[' 2 -ne 2 ']'
+ example_name=all-in-one-with-options
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml
++ jaeger_name=my-jaeger
++ '[' -z my-jaeger ']'
++ echo my-jaeger
++ return 0
+ jaeger_name=my-jaeger
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test my-jaeger true 01
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml
+ '[' false = true ']'
+ start_test examples-auto-provision-kafka
+ '[' 1 -ne 1 ']'
+ test_name=examples-auto-provision-kafka
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-auto-provision-kafka'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-auto-provision-kafka\e[0m'
Rendering files for test examples-auto-provision-kafka
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options
+ '[' examples-all-in-one-with-options '!=' _build ']'
+ cd ..
+ mkdir -p examples-auto-provision-kafka
+ cd examples-auto-provision-kafka
+ example_name=auto-provision-kafka
+ render_install_kafka_operator 01
+ '[' 1 -ne 1 ']'
+ test_step=01
+ '[' true '!=' true ']'
+ render_install_example auto-provision-kafka 02
+ '[' 2 -ne 2 ']'
+ example_name=auto-provision-kafka
+ test_step=02
+ install_file=./02-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/auto-provision-kafka.yaml -o ./02-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./02-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./02-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' ./02-install.yaml
++ jaeger_name=auto-provision-kafka
++ '[' -z auto-provision-kafka ']'
++ echo auto-provision-kafka
++ return 0
+ JAEGER_NAME=auto-provision-kafka
+ local jaeger_strategy
++ get_jaeger_strategy ./02-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./02-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.strategy' ./02-install.yaml
++ strategy=streaming
++ '[' streaming = production ']'
++ '[' streaming = streaming ']'
++ echo streaming
++ return 0
+ jaeger_strategy=streaming
+ '[' streaming = DaemonSet ']'
+ '[' streaming = allInOne ']'
+ '[' streaming = production ']'
+ '[' streaming = streaming ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./02-assert.yaml
+ [[ true = true ]]
+ [[ true = true ]]
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./02-install.yaml
+ mv ./02-assert.yaml ./05-assert.yaml
+ render_assert_kafka true auto-provision-kafka 02
+ '[' 3 -ne 3 ']'
+ autoprovisioned=true
+ cluster_name=auto-provision-kafka
+ test_step=02
+ '[' true = true ']'
+ is_kafka_minimal_enabled
+ namespaces=(observability openshift-operators openshift-distributed-tracing)
+ for i in "${namespaces[@]}"
++ kubectl get pods -n observability -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=true
+ '[' true == true ']'
+ return 0
+ replicas=1
+ CLUSTER_NAME=auto-provision-kafka
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./02-assert.yaml
++ expr 02 + 1
+ CLUSTER_NAME=auto-provision-kafka
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./3-assert.yaml
++ expr 02 + 2
+ CLUSTER_NAME=auto-provision-kafka
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./04-assert.yaml
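The is_kafka_minimal_enabled trace above probes three candidate operator namespaces for a KAFKA-PROVISIONING-MINIMAL env var on the jaeger-operator pod (the garbled yq path in the captured log is assumed to be .env[]). A reconstruction:

  # Assumed reconstruction of is_kafka_minimal_enabled from the trace above.
  is_kafka_minimal_enabled() {
    local namespaces=(observability openshift-operators openshift-distributed-tracing)
    local enabled
    for i in "${namespaces[@]}"; do
      enabled=$(kubectl get pods -n "$i" -l name=jaeger-operator -o yaml \
        | /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value')
      if [ "$enabled" == true ]; then
        return 0   # found; openshift-distributed-tracing matched in this run
      fi
    done
    return 1
  }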
+ render_smoke_test_example auto-provision-kafka 06
+ '[' 2 -ne 2 ']'
+ example_name=auto-provision-kafka
+ test_step=06
+ deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/auto-provision-kafka.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/auto-provision-kafka.yaml
++ jaeger_name=auto-provision-kafka
++ '[' -z auto-provision-kafka ']'
++ echo auto-provision-kafka
++ return 0
+ jaeger_name=auto-provision-kafka
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test auto-provision-kafka true 06
+ '[' 3 -ne 3 ']'
+ jaeger=auto-provision-kafka
+ is_secured=true
+ test_step=06
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443
+ JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268
+ export JAEGER_NAME=auto-provision-kafka
+ JAEGER_NAME=auto-provision-kafka
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./06-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./06-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-business-application-injected-sidecar
+ '[' 1 -ne 1 ']'
+ test_name=examples-business-application-injected-sidecar
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-business-application-injected-sidecar'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m'
Rendering files for test examples-business-application-injected-sidecar
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-auto-provision-kafka
+ '[' examples-auto-provision-kafka '!=' _build ']'
+ cd ..
+ mkdir -p examples-business-application-injected-sidecar
+ cd examples-business-application-injected-sidecar
+ example_name=simplest
+ cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml
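The eight yq calls above set the Vert.x container image and build a liveness probe field by field. An equivalent single-pass form of the same edit (a sketch, not the script's actual code; yq v4 syntax as used elsewhere in this log):

  # Equivalent single yq invocation for the probe edits above.
  VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \
  /tmp/jaeger-tests/bin/yq e -i '
    .spec.template.spec.containers[0].image = strenv(VERTX_IMG) |
    .spec.template.spec.containers[0].livenessProbe = {
      "httpGet": {"path": "/", "port": 8080},
      "initialDelaySeconds": 1,
      "failureThreshold": 3,
      "periodSeconds": 10,
      "successThreshold": 1,
      "timeoutSeconds": 1
    }' ./00-install.yaml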
+ render_install_example simplest 01
+ '[' 2 -ne 2 ']'
+ example_name=simplest
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=simplest
++ '[' -z simplest ']'
++ echo simplest
++ return 0
+ JAEGER_NAME=simplest
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_smoke_test_example simplest 02
+ '[' 2 -ne 2 ']'
+ example_name=simplest
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml
++ jaeger_name=simplest
++ '[' -z simplest ']'
++ echo simplest
++ return 0
+ jaeger_name=simplest
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test simplest true 02
+ '[' 3 -ne 3 ']'
+ jaeger=simplest
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simplest-query:443
+ JAEGER_QUERY_ENDPOINT=https://simplest-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268
+ export JAEGER_NAME=simplest
+ JAEGER_NAME=simplest
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-collector-with-priority-class
+ '[' 1 -ne 1 ']'
+ test_name=examples-collector-with-priority-class
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-collector-with-priority-class'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m'
Rendering files for test examples-collector-with-priority-class
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar
+ '[' examples-business-application-injected-sidecar '!=' _build ']'
+ cd ..
+ mkdir -p examples-collector-with-priority-class
+ cd examples-collector-with-priority-class
+ example_name=collector-with-priority-class
+ render_install_example collector-with-priority-class 00
+ '[' 2 -ne 2 ']'
+ example_name=collector-with-priority-class
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=collector-with-high-priority
++ '[' -z collector-with-high-priority ']'
++ echo collector-with-high-priority
++ return 0
+ JAEGER_NAME=collector-with-high-priority
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example collector-with-priority-class 01
+ '[' 2 -ne 2 ']'
+ example_name=collector-with-priority-class
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml
++ jaeger_name=collector-with-high-priority
++ '[' -z collector-with-high-priority ']'
++ echo collector-with-high-priority
++ return 0
+ jaeger_name=collector-with-high-priority
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test collector-with-high-priority true 01
+ '[' 3 -ne 3 ']'
+ jaeger=collector-with-high-priority
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443
+ JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268
+ export JAEGER_NAME=collector-with-high-priority
+ JAEGER_NAME=collector-with-high-priority
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-service-types
+ '[' 1 -ne 1 ']'
+ test_name=examples-service-types
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-service-types'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-service-types\e[0m'
Rendering files for test examples-service-types
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class
+ '[' examples-collector-with-priority-class '!=' _build ']'
+ cd ..
+ mkdir -p examples-service-types
+ cd examples-service-types
+ example_name=service-types
+ render_install_example service-types 00
+ '[' 2 -ne 2 ']'
+ example_name=service-types
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=service-types
++ '[' -z service-types ']'
++ echo service-types
++ return 0
+ JAEGER_NAME=service-types
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example service-types 01
+ '[' 2 -ne 2 ']'
+ example_name=service-types
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml
++ jaeger_name=service-types
++ '[' -z service-types ']'
++ echo service-types
++ return 0
+ jaeger_name=service-types
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test service-types true 01
+ '[' 3 -ne 3 ']'
+ jaeger=service-types
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://service-types-query:443
+ JAEGER_QUERY_ENDPOINT=https://service-types-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268
+ export JAEGER_NAME=service-types
+ JAEGER_NAME=service-types
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-simple-prod
+ '[' 1 -ne 1 ']'
+ test_name=examples-simple-prod
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-simple-prod'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m'
Rendering files for test examples-simple-prod
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types
+ '[' examples-service-types '!=' _build ']'
+ cd ..
+ mkdir -p examples-simple-prod
+ cd examples-simple-prod
+ example_name=simple-prod
+ render_install_example simple-prod 01
+ '[' 2 -ne 2 ']'
+ example_name=simple-prod
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=simple-prod
++ '[' -z simple-prod ']'
++ echo simple-prod
++ return 0
+ JAEGER_NAME=simple-prod
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=production
++ '[' production = production ']'
++ echo production
++ return 0
+ jaeger_strategy=production
+ '[' production = DaemonSet ']'
+ '[' production = allInOne ']'
+ '[' production = production ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ [[ true = true ]]
+ [[ true = true ]]
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml
+ render_smoke_test_example simple-prod 02
+ '[' 2 -ne 2 ']'
+ example_name=simple-prod
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml
+++ /tmp/jaeger-tests/bin/yq e '.
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml
++ jaeger_name=simple-prod
++ '[' -z simple-prod ']'
++ echo simple-prod
++ return 0
+ jaeger_name=simple-prod
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test simple-prod true 02
+ '[' 3 -ne 3 ']'
+ jaeger=simple-prod
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
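For every production or streaming example in this suite the same storage patch is applied before running: the example's storage options are emptied and a small single-node Elasticsearch is pinned. A consolidated form of the two yq calls traced above (the helper name is hypothetical, for illustration only):

  # patch_example_storage is a hypothetical wrapper around the two yq edits
  # traced above; the expressions themselves are verbatim from the log.
  patch_example_storage() {
    local install_file=$1
    /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' "$install_file"
    /tmp/jaeger-tests/bin/yq e -i \
      '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' \
      "$install_file"
  }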
+ mkdir -p examples-simple-prod-with-volumes
+ cd examples-simple-prod-with-volumes
+ example_name=simple-prod-with-volumes
+ render_install_example simple-prod-with-volumes 01
+ '[' 2 -ne 2 ']'
+ example_name=simple-prod-with-volumes
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=simple-prod
++ '[' -z simple-prod ']'
++ echo simple-prod
++ return 0
+ JAEGER_NAME=simple-prod
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=production
++ '[' production = production ']'
++ echo production
++ return 0
+ jaeger_strategy=production
+ '[' production = DaemonSet ']'
+ '[' production = allInOne ']'
+ '[' production = production ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ [[ true = true ]]
+ [[ true = true ]]
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml
+ render_smoke_test_example simple-prod-with-volumes 02
+ '[' 2 -ne 2 ']'
+ example_name=simple-prod-with-volumes
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
++ jaeger_name=simple-prod
++ '[' -z simple-prod ']'
++ echo simple-prod
++ return 0
+ jaeger_name=simple-prod
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test simple-prod true 02
+ '[' 3 -ne 3 ']'
+ jaeger=simple-prod
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml
+ start_test examples-simplest
+ '[' 1 -ne 1 ']'
+ test_name=examples-simplest
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-simplest'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-simplest\e[0m'
Rendering files for test examples-simplest
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes
+ '[' examples-simple-prod-with-volumes '!=' _build ']'
+ cd ..
+ mkdir -p examples-simplest
+ cd examples-simplest
+ example_name=simplest
+ render_install_example simplest 00
+ '[' 2 -ne 2 ']'
+ example_name=simplest
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=simplest
++ '[' -z simplest ']'
++ echo simplest
++ return 0
+ JAEGER_NAME=simplest
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example simplest 01
+ '[' 2 -ne 2 ']'
+ example_name=simplest
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml
++ jaeger_name=simplest
++ '[' -z simplest ']'
++ echo simplest
++ return 0
+ jaeger_name=simplest
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test simplest true 01
+ '[' 3 -ne 3 ']'
+ jaeger=simplest
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simplest-query:443
+ JAEGER_QUERY_ENDPOINT=https://simplest-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268
+ export JAEGER_NAME=simplest
+ JAEGER_NAME=simplest
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-with-badger
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-badger
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-badger'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m'
Rendering files for test examples-with-badger
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest
+ '[' examples-simplest '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-badger
+ cd examples-with-badger
+ example_name=with-badger
+ render_install_example with-badger 00
+ '[' 2 -ne 2 ']'
+ example_name=with-badger
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=with-badger
++ '[' -z with-badger ']'
++ echo with-badger
++ return 0
+ JAEGER_NAME=with-badger
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example with-badger 01
+ '[' 2 -ne 2 ']'
+ example_name=with-badger
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml
++ jaeger_name=with-badger
++ '[' -z with-badger ']'
++ echo with-badger
++ return 0
+ jaeger_name=with-badger
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-badger true 01
+ '[' 3 -ne 3 ']'
+ jaeger=with-badger
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-badger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268
+ export JAEGER_NAME=with-badger
+ JAEGER_NAME=with-badger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-with-badger-and-volume
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-badger-and-volume
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-badger-and-volume'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m'
Rendering files for test examples-with-badger-and-volume
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger
+ '[' examples-with-badger '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-badger-and-volume
+ cd examples-with-badger-and-volume
+ example_name=with-badger-and-volume
+ render_install_example with-badger-and-volume 00
+ '[' 2 -ne 2 ']'
+ example_name=with-badger-and-volume
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=with-badger-and-volume
++ '[' -z with-badger-and-volume ']'
++ echo with-badger-and-volume
++ return 0
+ JAEGER_NAME=with-badger-and-volume
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example with-badger-and-volume 01
+ '[' 2 -ne 2 ']'
+ example_name=with-badger-and-volume
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml
++ jaeger_name=with-badger-and-volume
++ '[' -z with-badger-and-volume ']'
++ echo with-badger-and-volume
++ return 0
+ jaeger_name=with-badger-and-volume
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-badger-and-volume true 01
+ '[' 3 -ne 3 ']'
+ jaeger=with-badger-and-volume
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268
+ export JAEGER_NAME=with-badger-and-volume
+ JAEGER_NAME=with-badger-and-volume
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-with-cassandra
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-cassandra
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-cassandra'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m'
Rendering files for test examples-with-cassandra
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume
+ '[' examples-with-badger-and-volume '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-cassandra
+ cd examples-with-cassandra
+ example_name=with-cassandra
+ render_install_cassandra 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml
+ render_install_example with-cassandra 01
+ '[' 2 -ne 2 ']'
+ example_name=with-cassandra
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=with-cassandra
++ '[' -z with-cassandra ']'
++ echo with-cassandra
++ return 0
+ JAEGER_NAME=with-cassandra
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=allInOne
++ '[' allInOne = production ']'
++ '[' allInOne = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_smoke_test_example with-cassandra 02
+ '[' 2 -ne 2 ']'
+ example_name=with-cassandra
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml
++ jaeger_name=with-cassandra
++ '[' -z with-cassandra ']'
++ echo with-cassandra
++ return 0
+ jaeger_name=with-cassandra
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-cassandra true 02
+ '[' 3 -ne 3 ']'
+ jaeger=with-cassandra
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268
+ export JAEGER_NAME=with-cassandra
+ JAEGER_NAME=with-cassandra
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-with-sampling
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-sampling
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-sampling'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m'
Rendering files for test examples-with-sampling
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra
+ '[' examples-with-cassandra '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-sampling
+ cd examples-with-sampling
+ export example_name=with-sampling
+ example_name=with-sampling
+ render_install_cassandra 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml
+ render_install_example with-sampling 01
+ '[' 2 -ne 2 ']'
+ example_name=with-sampling
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=with-sampling
++ '[' -z with-sampling ']'
++ echo with-sampling
++ return 0
+ JAEGER_NAME=with-sampling
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=allInOne
++ '[' allInOne = production ']'
++ '[' allInOne = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_smoke_test_example with-sampling 02
+ '[' 2 -ne 2 ']'
+ example_name=with-sampling
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml
++ jaeger_name=with-sampling
++ '[' -z with-sampling ']'
++ echo with-sampling
++ return 0
+ jaeger_name=with-sampling
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-sampling true 02
+ '[' 3 -ne 3 ']'
+ jaeger=with-sampling
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268
+ export JAEGER_NAME=with-sampling
+ JAEGER_NAME=with-sampling
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ start_test examples-openshift-agent-as-daemonset
+ '[' 1 -ne 1 ']'
+ test_name=examples-openshift-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-openshift-agent-as-daemonset'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-openshift-agent-as-daemonset\e[0m'
Rendering files for test examples-openshift-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling
+ '[' examples-with-sampling '!=' _build ']'
+ cd ..
+ mkdir -p examples-openshift-agent-as-daemonset
+ cd examples-openshift-agent-as-daemonset
+ prepare_daemonset 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ '[' true = true ']'
+ cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml
+ echo ---
+ cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml
+ JAEGER_NAME=agent-as-daemonset
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml
+ render_install_vertx 03
+ '[' 1 -ne 1 ']'
+ test_step=03
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./03-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].env=[{"name": "JAEGER_AGENT_HOST", "valueFrom": {"fieldRef": {"apiVersion": "v1", "fieldPath": "status.hostIP"}}}]' ./03-install.yaml
+ render_find_service agent-as-daemonset production order 00 04
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-daemonset
+ deployment_strategy=production
+ service_name=order
+ job_number=00
+ test_step=04
+ export JAEGER_NAME=agent-as-daemonset
+ JAEGER_NAME=agent-as-daemonset
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' production '!=' allInOne ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template -o ./04-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ start_test examples-openshift-with-htpasswd
+ '[' 1 -ne 1 ']'
+ test_name=examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-openshift-with-htpasswd'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m'
Rendering files for test examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-agent-as-daemonset
+ '[' examples-openshift-agent-as-daemonset '!=' _build ']'
+ cd ..
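Aside: the yq edit in the daemonset test above is the genuinely topology-specific wiring step: it injects JAEGER_AGENT_HOST into the Vert.x deployment via the Kubernetes downward API, so each client pod reports spans to the jaeger-agent running on its own node (status.hostIP), which is exactly what an agent-as-DaemonSet setup requires. A quick way to inspect what the edit wrote, using the same yq binary and file as above:

    /tmp/jaeger-tests/bin/yq e '.spec.template.spec.containers[0].env' ./03-install.yaml
    # Expected output:
    #   - name: JAEGER_AGENT_HOST
    #     valueFrom:
    #       fieldRef:
    #         apiVersion: v1
    #         fieldPath: status.hostIP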
+ mkdir -p examples-openshift-with-htpasswd
+ cd examples-openshift-with-htpasswd
+ export JAEGER_NAME=with-htpasswd
+ JAEGER_NAME=with-htpasswd
+ export JAEGER_USERNAME=awesomeuser
+ JAEGER_USERNAME=awesomeuser
+ export JAEGER_PASSWORD=awesomepassword
+ JAEGER_PASSWORD=awesomepassword
+ export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
+ JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ base64
+ SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg==
+ /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ INSECURE=true
+ JAEGER_USERNAME=
+ JAEGER_PASSWORD=
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml
+ JAEGER_USERNAME=wronguser
+ JAEGER_PASSWORD=wrongpassword
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running examples E2E tests'
Running examples E2E tests
+ cd tests/e2e/examples/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-894131460
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 17 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/examples-agent-as-daemonset
=== PAUSE kuttl/harness/examples-agent-as-daemonset
=== RUN kuttl/harness/examples-agent-with-priority-class
=== PAUSE kuttl/harness/examples-agent-with-priority-class
=== RUN kuttl/harness/examples-all-in-one-with-options
=== PAUSE kuttl/harness/examples-all-in-one-with-options
=== RUN kuttl/harness/examples-auto-provision-kafka
=== PAUSE kuttl/harness/examples-auto-provision-kafka
=== RUN kuttl/harness/examples-business-application-injected-sidecar
=== PAUSE kuttl/harness/examples-business-application-injected-sidecar
=== RUN kuttl/harness/examples-collector-with-priority-class
=== PAUSE kuttl/harness/examples-collector-with-priority-class
=== RUN kuttl/harness/examples-openshift-agent-as-daemonset
=== PAUSE kuttl/harness/examples-openshift-agent-as-daemonset
=== RUN kuttl/harness/examples-openshift-with-htpasswd
=== PAUSE kuttl/harness/examples-openshift-with-htpasswd
=== RUN kuttl/harness/examples-service-types
=== PAUSE kuttl/harness/examples-service-types
=== RUN kuttl/harness/examples-simple-prod
=== PAUSE kuttl/harness/examples-simple-prod
=== RUN kuttl/harness/examples-simple-prod-with-volumes
=== PAUSE kuttl/harness/examples-simple-prod-with-volumes
=== RUN kuttl/harness/examples-simplest
=== PAUSE kuttl/harness/examples-simplest
=== RUN kuttl/harness/examples-with-badger
=== PAUSE kuttl/harness/examples-with-badger
=== RUN kuttl/harness/examples-with-badger-and-volume
=== PAUSE kuttl/harness/examples-with-badger-and-volume
=== RUN kuttl/harness/examples-with-cassandra
=== PAUSE kuttl/harness/examples-with-cassandra
=== RUN kuttl/harness/examples-with-sampling
=== PAUSE kuttl/harness/examples-with-sampling
=== CONT kuttl/harness/artifacts
logger.go:42: 08:12:18 | artifacts | Creating namespace: kuttl-test-secure-seahorse
logger.go:42: 08:12:18 | artifacts | artifacts events from ns kuttl-test-secure-seahorse:
logger.go:42: 08:12:18 | artifacts | Deleting namespace: kuttl-test-secure-seahorse
=== CONT kuttl/harness/examples-service-types
logger.go:42: 08:12:24 | examples-service-types | Creating namespace: kuttl-test-capable-goat
logger.go:42: 08:12:24 | examples-service-types/0-install | starting test step 0-install
logger.go:42: 08:12:24 | examples-service-types/0-install | Jaeger:kuttl-test-capable-goat/service-types created
logger.go:42: 08:12:30 | examples-service-types/0-install | test step completed 0-install
logger.go:42: 08:12:30 | examples-service-types/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:12:30 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null]
logger.go:42: 08:12:31 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
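Aside: the examples-openshift-with-htpasswd rendering a few steps back bakes a {SHA}-scheme htpasswd entry into a base64-encoded Secret; note the trailing Cg== in the SECRET value, which is the newline echo appends before base64 runs. A hedged sketch of reproducing those two values from scratch, using Apache's htpasswd (httpd-tools/apache2-utils) rather than a precomputed hash:

    JAEGER_USERNAME=awesomeuser
    JAEGER_PASSWORD=awesomepassword
    # {SHA} is base64(SHA-1(password)); -n prints instead of writing a file,
    # -b takes the password on the command line, -s selects the SHA scheme.
    JAEGER_USER_PASSWORD_HASH=$(htpasswd -nbs "$JAEGER_USERNAME" "$JAEGER_PASSWORD")
    # echo (not printf) keeps the trailing newline seen in the traced SECRET value.
    SECRET=$(echo "$JAEGER_USER_PASSWORD_HASH" | base64)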
logger.go:42: 08:12:38 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:12:38 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:12:38 | examples-service-types/1-smoke-test | job.batch/report-span created
logger.go:42: 08:12:39 | examples-service-types/1-smoke-test | job.batch/check-span created
logger.go:42: 08:12:45 | examples-service-types/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:12:45 | examples-service-types/2- | starting test step 2-
logger.go:42: 08:12:45 | examples-service-types/2- | test step completed 2-
logger.go:42: 08:12:45 | examples-service-types | examples-service-types events from ns kuttl-test-capable-goat:
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:27 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6 Binding Scheduled Successfully assigned kuttl-test-capable-goat/service-types-77b89f9fcd-fsrs6 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:27 +0000 UTC Normal ReplicaSet.apps service-types-77b89f9fcd SuccessfulCreate Created pod: service-types-77b89f9fcd-fsrs6 replicaset-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:27 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:27 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-77b89f9fcd to 1 deployment-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:28 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6 AddedInterface Add eth0 [10.131.0.59/23] from ovn-kubernetes
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:28 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:28 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:28 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:28 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:28 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:28 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:30 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:30 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:33 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:34 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:34 +0000 UTC Normal Pod service-types-77b89f9fcd-fsrs6.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:34 +0000 UTC Normal ReplicaSet.apps service-types-77b89f9fcd SuccessfulDelete Deleted pod: service-types-77b89f9fcd-fsrs6 replicaset-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:34 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-77b89f9fcd to 0 from 1 deployment-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal Pod service-types-65dc9b8875-bll64 Binding Scheduled Successfully assigned kuttl-test-capable-goat/service-types-65dc9b8875-bll64 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal Pod service-types-65dc9b8875-bll64 AddedInterface Add eth0 [10.131.0.60/23] from ovn-kubernetes
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal Pod service-types-65dc9b8875-bll64.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal Pod service-types-65dc9b8875-bll64.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal Pod service-types-65dc9b8875-bll64.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal Pod service-types-65dc9b8875-bll64.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal Pod service-types-65dc9b8875-bll64.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal Pod service-types-65dc9b8875-bll64.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal ReplicaSet.apps service-types-65dc9b8875 SuccessfulCreate Created pod: service-types-65dc9b8875-bll64 replicaset-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:35 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-65dc9b8875 to 1 deployment-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:38 +0000 UTC Normal Pod report-span-29qrp Binding Scheduled Successfully assigned kuttl-test-capable-goat/report-span-29qrp to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:38 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-29qrp job-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:39 +0000 UTC Normal Pod check-span-qm9kb Binding Scheduled Successfully assigned kuttl-test-capable-goat/check-span-qm9kb to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:39 +0000 UTC Normal Pod check-span-qm9kb AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:39 +0000 UTC Normal Pod check-span-qm9kb.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:39 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-qm9kb job-controller
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:39 +0000 UTC Normal Pod report-span-29qrp AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:39 +0000 UTC Normal Pod report-span-29qrp.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:39 +0000 UTC Normal Pod report-span-29qrp.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:39 +0000 UTC Normal Pod report-span-29qrp.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:41 +0000 UTC Normal Pod check-span-qm9kb.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" in 2.409s (2.409s including waiting) kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:41 +0000 UTC Normal Pod check-span-qm9kb.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:41 +0000 UTC Normal Pod check-span-qm9kb.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:12:45 | examples-service-types | 2023-11-13 08:12:44 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:12:45 | examples-service-types | Deleting namespace: kuttl-test-capable-goat
=== CONT kuttl/harness/examples-with-sampling
logger.go:42: 08:13:16 | examples-with-sampling | Creating namespace: kuttl-test-proud-grub
logger.go:42: 08:13:16 | examples-with-sampling/0-install | starting test step 0-install
logger.go:42: 08:13:16 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE]
logger.go:42: 08:13:16 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 08:13:16 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-proud-grub
logger.go:42: 08:13:16 | examples-with-sampling/0-install | kubectl create namespace kuttl-test-proud-grub 2>&1 | grep -v "already exists" || true
logger.go:42: 08:13:16 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-proud-grub 2>&1 | grep -v "already exists" || true
logger.go:42: 08:13:17 | examples-with-sampling/0-install | service/cassandra created
logger.go:42: 08:13:17 | examples-with-sampling/0-install | statefulset.apps/cassandra created
logger.go:42: 08:13:17 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 08:13:27 | examples-with-sampling/0-install | test step completed 0-install
logger.go:42: 08:13:27 | examples-with-sampling/1-install | starting test step 1-install
logger.go:42: 08:13:27 | examples-with-sampling/1-install | Jaeger:kuttl-test-proud-grub/with-sampling created
logger.go:42: 08:13:33 | examples-with-sampling/1-install | test step completed 1-install
logger.go:42: 08:13:33 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:13:33 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null]
logger.go:42: 08:13:35 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
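Aside: the 0-install step shells out to make cassandra, and kuttl echoes the recipe as it runs. The equivalent standalone shell, matching the commands logged above (STORAGE_NAMESPACE is supplied by the test step as $NAMESPACE):

    STORAGE_NAMESPACE=$NAMESPACE
    # The grep filter plus '|| true' keeps the step idempotent: re-running
    # against an existing namespace or statefulset does not fail the make target.
    kubectl create namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true
    kubectl create -f ./tests/cassandra.yml --namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true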
logger.go:42: 08:13:41 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:13:41 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:13:42 | examples-with-sampling/2-smoke-test | job.batch/report-span created
logger.go:42: 08:13:42 | examples-with-sampling/2-smoke-test | job.batch/check-span created
logger.go:42: 08:13:54 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 08:13:54 | examples-with-sampling/3- | starting test step 3-
logger.go:42: 08:13:54 | examples-with-sampling/3- | test step completed 3-
logger.go:42: 08:13:54 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-proud-grub:
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:17 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-proud-grub/cassandra-0 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:17 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:17 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:17 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:21 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 4.063s (4.063s including waiting) kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:21 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:22 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:22 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-proud-grub/cassandra-1 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:22 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:23 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.61/23] from ovn-kubernetes
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:23 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:27 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 4.182s (4.182s including waiting) kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:27 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:27 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7 Binding Scheduled Successfully assigned kuttl-test-proud-grub/with-sampling-7f488976d5-kzrp7 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7 AddedInterface Add eth0 [10.131.0.62/23] from ovn-kubernetes
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal ReplicaSet.apps with-sampling-7f488976d5 SuccessfulCreate Created pod: with-sampling-7f488976d5-kzrp7 replicaset-controller
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:31 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-7f488976d5 to 1 deployment-controller
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:37 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:37 +0000 UTC Normal Pod with-sampling-7f488976d5-kzrp7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:37 +0000 UTC Normal ReplicaSet.apps with-sampling-7f488976d5 SuccessfulDelete Deleted pod: with-sampling-7f488976d5-kzrp7 replicaset-controller
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:37 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set with-sampling-7f488976d5 to 0 from 1 deployment-controller
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:38 +0000 UTC Normal Pod with-sampling-6d9b8898c5-2ljkr Binding Scheduled Successfully assigned kuttl-test-proud-grub/with-sampling-6d9b8898c5-2ljkr to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:38 +0000 UTC Normal Pod with-sampling-6d9b8898c5-2ljkr AddedInterface Add eth0 [10.131.0.63/23] from ovn-kubernetes
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:38 +0000 UTC Normal Pod with-sampling-6d9b8898c5-2ljkr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:38 +0000 UTC Normal ReplicaSet.apps with-sampling-6d9b8898c5 SuccessfulCreate Created pod: with-sampling-6d9b8898c5-2ljkr replicaset-controller
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:38 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-6d9b8898c5 to 1 deployment-controller
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:39 +0000 UTC Normal Pod with-sampling-6d9b8898c5-2ljkr.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:39 +0000 UTC Normal Pod with-sampling-6d9b8898c5-2ljkr.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:39 +0000 UTC Normal Pod with-sampling-6d9b8898c5-2ljkr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:39 +0000 UTC Normal Pod with-sampling-6d9b8898c5-2ljkr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:39 +0000 UTC Normal Pod with-sampling-6d9b8898c5-2ljkr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod check-span-674r4 Binding Scheduled Successfully assigned kuttl-test-proud-grub/check-span-674r4 to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod check-span-674r4 AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod check-span-674r4.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod check-span-674r4.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod check-span-674r4.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-674r4 job-controller
logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod report-span-pbv25 Binding Scheduled Successfully assigned kuttl-test-proud-grub/report-span-pbv25 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod report-span-pbv25 AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod report-span-pbv25.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod report-span-pbv25.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Pod report-span-pbv25.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:42 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-pbv25 job-controller logger.go:42: 08:13:54 | examples-with-sampling | 2023-11-13 08:13:53 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:13:54 | examples-with-sampling | Deleting namespace: kuttl-test-proud-grub === CONT kuttl/harness/examples-with-cassandra logger.go:42: 08:18:22 | examples-with-cassandra | Creating namespace: kuttl-test-game-sole logger.go:42: 08:18:22 | examples-with-cassandra/0-install | starting test step 0-install logger.go:42: 08:18:22 | examples-with-cassandra/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 08:18:22 | examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 08:18:22 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-game-sole logger.go:42: 08:18:22 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-game-sole 2>&1 | grep -v "already exists" || true logger.go:42: 08:18:22 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-game-sole 2>&1 | grep -v "already exists" || true logger.go:42: 08:18:23 | examples-with-cassandra/0-install | service/cassandra created logger.go:42: 08:18:23 | examples-with-cassandra/0-install | statefulset.apps/cassandra created logger.go:42: 08:18:23 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 08:18:25 | examples-with-cassandra/0-install | test step completed 0-install logger.go:42: 08:18:25 | examples-with-cassandra/1-install | starting test step 1-install logger.go:42: 08:18:25 | examples-with-cassandra/1-install | Jaeger:kuttl-test-game-sole/with-cassandra created logger.go:42: 08:18:44 | examples-with-cassandra/1-install | test step completed 1-install logger.go:42: 08:18:44 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:18:44 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null] logger.go:42: 08:18:46 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the 
logger.go:42: 08:18:25 | examples-with-cassandra/1-install | starting test step 1-install
logger.go:42: 08:18:25 | examples-with-cassandra/1-install | Jaeger:kuttl-test-game-sole/with-cassandra created
logger.go:42: 08:18:44 | examples-with-cassandra/1-install | test step completed 1-install
logger.go:42: 08:18:44 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:18:44 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null]
logger.go:42: 08:18:46 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:19:35 | examples-with-cassandra/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:19:35 | examples-with-cassandra/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:19:36 | examples-with-cassandra/2-smoke-test | job.batch/report-span created
logger.go:42: 08:19:36 | examples-with-cassandra/2-smoke-test | job.batch/check-span created
logger.go:42: 08:19:47 | examples-with-cassandra/2-smoke-test | test step completed 2-smoke-test
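Each smoke test in this run follows the same two-Job pattern: gomplate renders tests/templates/smoke-test.yaml.template with the instance-specific endpoints, kubectl applies the result, report-span posts spans to the collector endpoint, and check-span polls the query endpoint until those spans are visible. In outline (the harness relies on kuttl asserts rather than kubectl wait; this is an equivalent sketch, not the literal test code):

    JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 \
    MOUNT_SECRET=e2e-test \
      gomplate -f tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"   # creates the report-span and check-span Jobs
    # check-span runs ./query until the reported spans show up, then exits 0:
    kubectl wait --for=condition=complete --timeout=300s job/check-span -n "$NAMESPACE"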
logger.go:42: 08:19:47 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-game-sole:
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:23 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-game-sole/cassandra-0 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:23 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.131.0.64/23] from ovn-kubernetes
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:23 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:23 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:23 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:23 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:24 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-game-sole/cassandra-1 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:24 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:24 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:24 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:24 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:24 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:28 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-4sk6t Binding Scheduled Successfully assigned kuttl-test-game-sole/with-cassandra-cassandra-schema-job-4sk6t to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:28 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-4sk6t job-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:29 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-4sk6t AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:29 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-4sk6t.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.47.0" kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:33 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-4sk6t.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.47.0" in 4.696s (4.696s including waiting) kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:33 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-4sk6t.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:33 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-4sk6t.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:40 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:41 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq Binding Scheduled Successfully assigned kuttl-test-game-sole/with-cassandra-76945545b6-lnltq to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:41 +0000 UTC Normal ReplicaSet.apps with-cassandra-76945545b6 SuccessfulCreate Created pod: with-cassandra-76945545b6-lnltq replicaset-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:41 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-76945545b6 to 1 deployment-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:42 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq AddedInterface Add eth0 [10.131.0.65/23] from ovn-kubernetes
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:42 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:42 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:42 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:42 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:42 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:42 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:48 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:48 +0000 UTC Normal Pod with-cassandra-76945545b6-lnltq.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:48 +0000 UTC Normal ReplicaSet.apps with-cassandra-76945545b6 SuccessfulDelete Deleted pod: with-cassandra-76945545b6-lnltq replicaset-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:48 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-76945545b6 to 0 from 1 deployment-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal Pod with-cassandra-6b579c5f-jtg74 Binding Scheduled Successfully assigned kuttl-test-game-sole/with-cassandra-6b579c5f-jtg74 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal Pod with-cassandra-6b579c5f-jtg74 AddedInterface Add eth0 [10.131.0.66/23] from ovn-kubernetes
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal Pod with-cassandra-6b579c5f-jtg74.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal Pod with-cassandra-6b579c5f-jtg74.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal Pod with-cassandra-6b579c5f-jtg74.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal Pod with-cassandra-6b579c5f-jtg74.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal Pod with-cassandra-6b579c5f-jtg74.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal Pod with-cassandra-6b579c5f-jtg74.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal ReplicaSet.apps with-cassandra-6b579c5f SuccessfulCreate Created pod: with-cassandra-6b579c5f-jtg74 replicaset-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:49 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-6b579c5f to 1 deployment-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:18:51 +0000 UTC Warning Pod with-cassandra-6b579c5f-jtg74.spec.containers{jaeger} BackOff Back-off restarting failed container jaeger in pod with-cassandra-6b579c5f-jtg74_kuttl-test-game-sole(b741ff64-3d7a-44eb-81d8-5e87175fad91) kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod check-span-6ww9n Binding Scheduled Successfully assigned kuttl-test-game-sole/check-span-6ww9n to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod check-span-6ww9n AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod check-span-6ww9n.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod check-span-6ww9n.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod check-span-6ww9n.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-6ww9n job-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod report-span-nvsdw Binding Scheduled Successfully assigned kuttl-test-game-sole/report-span-nvsdw to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod report-span-nvsdw AddedInterface Add eth0 [10.128.2.32/23] from ovn-kubernetes
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod report-span-nvsdw.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod report-span-nvsdw.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Pod report-span-nvsdw.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:36 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-nvsdw job-controller
logger.go:42: 08:19:47 | examples-with-cassandra | 2023-11-13 08:19:47 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:19:47 | examples-with-cassandra | Deleting namespace: kuttl-test-game-sole
=== CONT kuttl/harness/examples-with-badger-and-volume
logger.go:42: 08:19:59 | examples-with-badger-and-volume | Creating namespace: kuttl-test-natural-pangolin
logger.go:42: 08:19:59 | examples-with-badger-and-volume/0-install | starting test step 0-install
logger.go:42: 08:19:59 | examples-with-badger-and-volume/0-install | Jaeger:kuttl-test-natural-pangolin/with-badger-and-volume created
logger.go:42: 08:20:06 | examples-with-badger-and-volume/0-install | test step completed 0-install
logger.go:42: 08:20:06 | examples-with-badger-and-volume/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:20:06 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger-and-volume /dev/null]
logger.go:42: 08:20:08 | examples-with-badger-and-volume/1-smoke-test | Warning: resource jaegers/with-badger-and-volume is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:20:14 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:20:15 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:20:15 | examples-with-badger-and-volume/1-smoke-test | job.batch/report-span created
logger.go:42: 08:20:15 | examples-with-badger-and-volume/1-smoke-test | job.batch/check-span created
logger.go:42: 08:27:16 | examples-with-badger-and-volume/1-smoke-test | test step failed 1-smoke-test
case.go:364: failed in step 1-smoke-test
case.go:366: --- Job:kuttl-test-natural-pangolin/check-span
+++ Job:kuttl-test-natural-pangolin/check-span
@@ -1,8 +1,141 @@
apiVersion: batch/v1
kind: Job
metadata:
+  annotations:
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"check-span","namespace":"kuttl-test-natural-pangolin"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./query"],"env":[{"name":"SERVICE_NAME","value":"smoke-test-service"},{"name":"QUERY_HOST","value":"https://with-badger-and-volume-query:443"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: 4ac868f9-96db-4f73-bdf2-c5e304056528
+    batch.kubernetes.io/job-name: check-span
+    controller-uid: 4ac868f9-96db-4f73-bdf2-c5e304056528
+    job-name: check-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:parallelism: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"QUERY_HOST"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2023-11-13T08:20:15Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-11-13T08:25:33Z"
  name: check-span
  namespace: kuttl-test-natural-pangolin
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  parallelism: 1
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: 4ac868f9-96db-4f73-bdf2-c5e304056528
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: 4ac868f9-96db-4f73-bdf2-c5e304056528
+        batch.kubernetes.io/job-name: check-span
+        controller-uid: 4ac868f9-96db-4f73-bdf2-c5e304056528
+        job-name: check-span
+    spec:
+      containers:
+      - command:
+        - ./query
+        env:
+        - name: SERVICE_NAME
+          value: smoke-test-service
+        - name: QUERY_HOST
+          value: https://with-badger-and-volume-query:443
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2
+        imagePullPolicy: IfNotPresent
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2023-11-13T08:20:15Z"
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-natural-pangolin/check-span: .status.succeeded: key is missing from map
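This failure is kuttl's object diff: the step's assert file (not shown in the log) describes the expected state of the check-span Job, and after the step timeout the live Job still has active: 1 rather than succeeded: 1, so the assert fails with ".status.succeeded: key is missing from map". Judging from the "-" side of the diff, the assert file is presumably close to the following (hypothetical file name; kuttl matches the object by name within the test namespace):

    cat > 01-assert.yaml <<'EOF'
    apiVersion: batch/v1
    kind: Job
    metadata:
      name: check-span
    status:
      succeeded: 1     # the "-" line in the diff above; never reached in this run
    EOF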
logger.go:42: 08:27:16 | examples-with-badger-and-volume | examples-with-badger-and-volume events from ns kuttl-test-natural-pangolin:
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:03 +0000 UTC Normal Pod with-badger-and-volume-6b474d49b-v7wqr Binding Scheduled Successfully assigned kuttl-test-natural-pangolin/with-badger-and-volume-6b474d49b-v7wqr to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:03 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-6b474d49b SuccessfulCreate Created pod: with-badger-and-volume-6b474d49b-v7wqr replicaset-controller
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:03 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-6b474d49b to 1 deployment-controller
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:04 +0000 UTC Normal Pod with-badger-and-volume-6b474d49b-v7wqr AddedInterface Add eth0 [10.131.0.67/23] from ovn-kubernetes
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:04 +0000 UTC Normal Pod with-badger-and-volume-6b474d49b-v7wqr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:04 +0000 UTC Normal Pod with-badger-and-volume-6b474d49b-v7wqr.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:04 +0000 UTC Normal Pod with-badger-and-volume-6b474d49b-v7wqr.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:04 +0000 UTC Normal Pod with-badger-and-volume-6b474d49b-v7wqr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:04 +0000 UTC Normal Pod with-badger-and-volume-6b474d49b-v7wqr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:04 +0000 UTC Normal Pod with-badger-and-volume-6b474d49b-v7wqr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:15 +0000 UTC Normal Pod check-span-vzvfh Binding Scheduled Successfully assigned kuttl-test-natural-pangolin/check-span-vzvfh to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:15 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-vzvfh job-controller
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:15 +0000 UTC Normal Pod report-span-6bbrv Binding Scheduled Successfully assigned kuttl-test-natural-pangolin/report-span-6bbrv to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:15 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-6bbrv job-controller
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:16 +0000 UTC Normal Pod check-span-vzvfh AddedInterface Add eth0 [10.129.2.45/23] from ovn-kubernetes
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:16 +0000 UTC Normal Pod check-span-vzvfh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:16 +0000 UTC Normal Pod check-span-vzvfh.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:16 +0000 UTC Normal Pod check-span-vzvfh.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:16 +0000 UTC Normal Pod report-span-6bbrv AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:16 +0000 UTC Normal Pod report-span-6bbrv.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:16 +0000 UTC Normal Pod report-span-6bbrv.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:20:16 +0000 UTC Normal Pod report-span-6bbrv.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | 2023-11-13 08:25:20 +0000 UTC Warning Pod check-span-vzvfh.spec.containers{asserts-container} BackOff Back-off restarting failed container asserts-container in pod check-span-vzvfh_kuttl-test-natural-pangolin(95dcd780-aed6-4989-8e57-d27c5e7842b4) kubelet
logger.go:42: 08:27:16 | examples-with-badger-and-volume | Deleting namespace: kuttl-test-natural-pangolin
=== CONT kuttl/harness/examples-with-badger
logger.go:42: 08:27:22 | examples-with-badger | Creating namespace: kuttl-test-evolving-imp
logger.go:42: 08:27:22 | examples-with-badger/0-install | starting test step 0-install
logger.go:42: 08:27:23 | examples-with-badger/0-install | Jaeger:kuttl-test-evolving-imp/with-badger created
logger.go:42: 08:27:29 | examples-with-badger/0-install | test step completed 0-install
logger.go:42: 08:27:29 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:27:29 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null]
logger.go:42: 08:27:30 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:27:37 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:27:38 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:27:38 | examples-with-badger/1-smoke-test | job.batch/report-span created
logger.go:42: 08:27:38 | examples-with-badger/1-smoke-test | job.batch/check-span created
logger.go:42: 08:27:50 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:27:50 | examples-with-badger | examples-with-badger events from ns kuttl-test-evolving-imp:
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:26 +0000 UTC Normal Pod with-badger-66479964f5-pmng9 Binding Scheduled Successfully assigned kuttl-test-evolving-imp/with-badger-66479964f5-pmng9 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:26 +0000 UTC Normal Pod with-badger-66479964f5-pmng9 AddedInterface Add eth0 [10.131.0.68/23] from ovn-kubernetes
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:26 +0000 UTC Normal Pod with-badger-66479964f5-pmng9.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:26 +0000 UTC Normal ReplicaSet.apps with-badger-66479964f5 SuccessfulCreate Created pod: with-badger-66479964f5-pmng9 replicaset-controller
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:26 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-66479964f5 to 1 deployment-controller
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:27 +0000 UTC Normal Pod with-badger-66479964f5-pmng9.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:27 +0000 UTC Normal Pod with-badger-66479964f5-pmng9.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:27 +0000 UTC Normal Pod with-badger-66479964f5-pmng9.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:27 +0000 UTC Normal Pod with-badger-66479964f5-pmng9.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:27 +0000 UTC Normal Pod with-badger-66479964f5-pmng9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:32 +0000 UTC Normal Pod with-badger-66479964f5-pmng9.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:32 +0000 UTC Normal Pod with-badger-66479964f5-pmng9.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:32 +0000 UTC Normal ReplicaSet.apps with-badger-66479964f5 SuccessfulDelete Deleted pod: with-badger-66479964f5-pmng9 replicaset-controller
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:32 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-66479964f5 to 0 from 1 deployment-controller
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:33 +0000 UTC Normal Pod with-badger-88d66f48c-clkhn Binding Scheduled Successfully assigned kuttl-test-evolving-imp/with-badger-88d66f48c-clkhn to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:33 +0000 UTC Normal Pod with-badger-88d66f48c-clkhn AddedInterface Add eth0 [10.131.0.69/23] from ovn-kubernetes
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:33 +0000 UTC Normal Pod with-badger-88d66f48c-clkhn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:33 +0000 UTC Normal ReplicaSet.apps with-badger-88d66f48c SuccessfulCreate Created pod: with-badger-88d66f48c-clkhn replicaset-controller
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:33 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-88d66f48c to 1 deployment-controller
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:34 +0000 UTC Normal Pod with-badger-88d66f48c-clkhn.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:34 +0000 UTC Normal Pod with-badger-88d66f48c-clkhn.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:34 +0000 UTC Normal Pod with-badger-88d66f48c-clkhn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:34 +0000 UTC Normal Pod with-badger-88d66f48c-clkhn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:34 +0000 UTC Normal Pod with-badger-88d66f48c-clkhn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:38 +0000 UTC Normal Pod check-span-td8n5 Binding Scheduled Successfully assigned kuttl-test-evolving-imp/check-span-td8n5 to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:38 +0000 UTC Normal Pod check-span-td8n5 AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:38 +0000 UTC Normal Pod check-span-td8n5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:38 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-td8n5 job-controller
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:38 +0000 UTC Normal Pod report-span-ttcdr Binding Scheduled Successfully assigned kuttl-test-evolving-imp/report-span-ttcdr to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:38 +0000 UTC Normal Pod report-span-ttcdr AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:38 +0000 UTC Normal Pod report-span-ttcdr.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:38 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-ttcdr job-controller
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:39 +0000 UTC Normal Pod check-span-td8n5.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:39 +0000 UTC Normal Pod check-span-td8n5.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:39 +0000 UTC Normal Pod report-span-ttcdr.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:39 +0000 UTC Normal Pod report-span-ttcdr.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:27:50 | examples-with-badger | 2023-11-13 08:27:50 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:27:50 | examples-with-badger | Deleting namespace: kuttl-test-evolving-imp
=== CONT kuttl/harness/examples-simplest
logger.go:42: 08:28:02 | examples-simplest | Creating namespace: kuttl-test-needed-doe
logger.go:42: 08:28:02 | examples-simplest/0-install | starting test step 0-install
logger.go:42: 08:28:02 | examples-simplest/0-install | Jaeger:kuttl-test-needed-doe/simplest created
logger.go:42: 08:28:08 | examples-simplest/0-install | test step completed 0-install
logger.go:42: 08:28:08 | examples-simplest/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:28:08 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 08:28:10 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:28:16 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:28:17 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:28:17 | examples-simplest/1-smoke-test | job.batch/report-span created
logger.go:42: 08:28:17 | examples-simplest/1-smoke-test | job.batch/check-span created
logger.go:42: 08:28:28 | examples-simplest/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:28:28 | examples-simplest | examples-simplest events from ns kuttl-test-needed-doe:
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:05 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-5bd496b886 to 1 deployment-controller
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:06 +0000 UTC Normal Pod simplest-5bd496b886-znk6t Binding Scheduled Successfully assigned kuttl-test-needed-doe/simplest-5bd496b886-znk6t to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:06 +0000 UTC Warning Pod simplest-5bd496b886-znk6t FailedMount MountVolume.SetUp failed for volume "simplest-ui-oauth-proxy-tls" : secret "simplest-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:06 +0000 UTC Warning Pod simplest-5bd496b886-znk6t FailedMount MountVolume.SetUp failed for volume "simplest-collector-tls-config-volume" : secret "simplest-collector-headless-tls" not found kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:06 +0000 UTC Normal ReplicaSet.apps simplest-5bd496b886 SuccessfulCreate Created pod: simplest-5bd496b886-znk6t replicaset-controller
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:07 +0000 UTC Normal Pod simplest-5bd496b886-znk6t AddedInterface Add eth0 [10.131.0.70/23] from ovn-kubernetes
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:07 +0000 UTC Normal Pod simplest-5bd496b886-znk6t.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:07 +0000 UTC Normal Pod simplest-5bd496b886-znk6t.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:07 +0000 UTC Normal Pod simplest-5bd496b886-znk6t.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:07 +0000 UTC Normal Pod simplest-5bd496b886-znk6t.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:07 +0000 UTC Normal Pod simplest-5bd496b886-znk6t.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:07 +0000 UTC Normal Pod simplest-5bd496b886-znk6t.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:12 +0000 UTC Normal Pod simplest-5bd496b886-znk6t.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:12 +0000 UTC Normal Pod simplest-5bd496b886-znk6t.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:12 +0000 UTC Normal ReplicaSet.apps simplest-5bd496b886 SuccessfulDelete Deleted pod: simplest-5bd496b886-znk6t replicaset-controller
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:12 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-5bd496b886 to 0 from 1 deployment-controller
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:13 +0000 UTC Normal Pod simplest-554b995945-cj4x4 Binding Scheduled Successfully assigned kuttl-test-needed-doe/simplest-554b995945-cj4x4 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:13 +0000 UTC Normal ReplicaSet.apps simplest-554b995945 SuccessfulCreate Created pod: simplest-554b995945-cj4x4 replicaset-controller
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:13 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-554b995945 to 1 deployment-controller
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:14 +0000 UTC Normal Pod simplest-554b995945-cj4x4 AddedInterface Add eth0 [10.131.0.71/23] from ovn-kubernetes
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:14 +0000 UTC Normal Pod simplest-554b995945-cj4x4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:14 +0000 UTC Normal Pod simplest-554b995945-cj4x4.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:14 +0000 UTC Normal Pod simplest-554b995945-cj4x4.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:14 +0000 UTC Normal Pod simplest-554b995945-cj4x4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:14 +0000 UTC Normal Pod simplest-554b995945-cj4x4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:14 +0000 UTC Normal Pod simplest-554b995945-cj4x4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:17 +0000 UTC Normal Pod check-span-k86ds Binding Scheduled Successfully assigned kuttl-test-needed-doe/check-span-k86ds to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-k86ds job-controller
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:17 +0000 UTC Normal Pod report-span-vd5rg Binding Scheduled Successfully assigned kuttl-test-needed-doe/report-span-vd5rg to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:17 +0000 UTC Normal Pod report-span-vd5rg AddedInterface Add eth0 [10.129.2.47/23] from ovn-kubernetes
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:17 +0000 UTC Normal Pod report-span-vd5rg.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:17 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-vd5rg job-controller
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:18 +0000 UTC Normal Pod check-span-k86ds AddedInterface Add eth0 [10.129.2.48/23] from ovn-kubernetes
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:18 +0000 UTC Normal Pod check-span-k86ds.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:18 +0000 UTC Normal Pod check-span-k86ds.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:18 +0000 UTC Normal Pod check-span-k86ds.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:18 +0000 UTC Normal Pod report-span-vd5rg.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:18 +0000 UTC Normal Pod report-span-vd5rg.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:28:28 | examples-simplest | 2023-11-13 08:28:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:28:28 | examples-simplest | Deleting namespace: kuttl-test-needed-doe
=== CONT kuttl/harness/examples-simple-prod-with-volumes
logger.go:42: 08:28:40 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
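That regexp is why the template is skipped: kuttl only collects step files named like <number>-<name> or <number>-<name>.yaml, and render.sh presumably renders the .yaml.template files into plain numbered steps before kuttl runs. A quick check of the pattern (GNU grep -P, since the expression uses \d):

    for f in 1-install.yaml 2-smoke-test.yaml 03-check-volume.yaml.template; do
      echo "$f" | grep -qP '^(\d+)-(?:[^\.]+)(?:\.yaml)?$' \
        && echo "step:    $f" || echo "ignored: $f"
    done
    # step:    1-install.yaml
    # step:    2-smoke-test.yaml
    # ignored: 03-check-volume.yaml.template  (the trailing .template defeats the $ anchor)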
logger.go:42: 08:28:40 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-balanced-maggot
logger.go:42: 08:28:40 | examples-simple-prod-with-volumes/1-install | starting test step 1-install
logger.go:42: 08:28:40 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-balanced-maggot/simple-prod created
logger.go:42: 08:29:15 | examples-simple-prod-with-volumes/1-install | test step completed 1-install
logger.go:42: 08:29:15 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:29:15 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 08:29:17 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:29:23 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:29:24 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:29:24 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created
logger.go:42: 08:29:24 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created
logger.go:42: 08:29:36 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 08:29:36 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume
logger.go:42: 08:29:36 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data]
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume
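The pod-name lookup above dumps the whole pod list as YAML and picks the first name out with yq. kubectl's built-in jsonpath output can do the same selection in one step; an equivalent alternative (not what the harness runs):

    # pick the first collector pod by label, then list the Badger/ES data dir inside it
    POD=$(kubectl get pods -n "$NAMESPACE" \
          -l app=jaeger,app.kubernetes.io/component=collector \
          -o jsonpath='{.items[0].metadata.name}')
    kubectl exec "$POD" -n "$NAMESPACE" -- ls /usr/share/elasticsearch/data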
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-balanced-maggot:
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989c9876 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc replicaset-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc Binding Scheduled Successfully assigned kuttl-test-balanced-maggot/elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:46 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989c9876 to 1 deployment-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:28:56 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:01 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbalancedmaggotsimpleprod-1-5989g97wc.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-rsrrg Binding Scheduled Successfully assigned kuttl-test-balanced-maggot/simple-prod-collector-55ff468b9d-rsrrg to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-rsrrg AddedInterface Add eth0 [10.131.0.72/23] from ovn-kubernetes
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-rsrrg.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-rsrrg.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-rsrrg.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-55ff468b9d SuccessfulCreate Created pod: simple-prod-collector-55ff468b9d-rsrrg replicaset-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-55ff468b9d to 1 deployment-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp Binding Scheduled Successfully assigned kuttl-test-balanced-maggot/simple-prod-query-774b94fc75-tfzcp to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp AddedInterface Add eth0 [10.129.2.49/23] from ovn-kubernetes
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-774b94fc75 SuccessfulCreate Created pod: simple-prod-query-774b94fc75-tfzcp replicaset-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:13 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-774b94fc75 to 1 deployment-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:18 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:18 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:18 +0000 UTC Normal Pod simple-prod-query-774b94fc75-tfzcp.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:18 +0000 UTC Normal ReplicaSet.apps simple-prod-query-774b94fc75 SuccessfulDelete Deleted pod: simple-prod-query-774b94fc75-tfzcp replicaset-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:18 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-774b94fc75 to 0 from 1 deployment-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:19 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw Binding Scheduled Successfully assigned kuttl-test-balanced-maggot/simple-prod-query-57c6b4bb65-nfbvw to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:19 +0000 UTC Normal ReplicaSet.apps simple-prod-query-57c6b4bb65 SuccessfulCreate Created pod: simple-prod-query-57c6b4bb65-nfbvw replicaset-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:19 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-57c6b4bb65 to 1 deployment-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:20 +0000 UTC Normal Pod simple-prod-query-57c6b4bb65-nfbvw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:24 +0000 UTC Normal Pod check-span-58dxw Binding Scheduled Successfully assigned kuttl-test-balanced-maggot/check-span-58dxw to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:24 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-58dxw job-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:24 +0000 UTC Normal Pod report-span-78756 Binding Scheduled Successfully assigned kuttl-test-balanced-maggot/report-span-78756 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:24 +0000 UTC Normal Pod report-span-78756 AddedInterface Add eth0 [10.131.0.73/23] from ovn-kubernetes
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:24 +0000 UTC Normal Pod report-span-78756.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:24 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-78756 job-controller
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:25 +0000 UTC Normal Pod check-span-58dxw AddedInterface Add eth0 [10.131.0.74/23] from ovn-kubernetes
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:25 +0000 UTC Normal Pod check-span-58dxw.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:25 +0000 UTC Normal Pod check-span-58dxw.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:25 +0000 UTC Normal Pod check-span-58dxw.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:25 +0000 UTC Normal Pod report-span-78756.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:25 +0000 UTC Normal Pod report-span-78756.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:30 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get
metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:30 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:30 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | 2023-11-13 08:29:35 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:29:37 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-balanced-maggot === CONT kuttl/harness/examples-simple-prod logger.go:42: 08:29:49 | examples-simple-prod | Creating namespace: kuttl-test-allowed-seagull logger.go:42: 08:29:49 | examples-simple-prod/1-install | starting test step 1-install logger.go:42: 08:29:49 | examples-simple-prod/1-install | Jaeger:kuttl-test-allowed-seagull/simple-prod created logger.go:42: 08:30:25 | examples-simple-prod/1-install | test step completed 1-install logger.go:42: 08:30:25 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:30:25 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 08:30:27 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
logger.go:42: 08:30:33 | examples-simple-prod/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:30:33 | examples-simple-prod/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:30:34 | examples-simple-prod/2-smoke-test | job.batch/report-span created logger.go:42: 08:30:34 | examples-simple-prod/2-smoke-test | job.batch/check-span created logger.go:42: 08:30:46 | examples-simple-prod/2-smoke-test | test step completed 2-smoke-test logger.go:42: 08:30:46 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-allowed-seagull: logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:55 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc688b9d8 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4 replicaset-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4 Binding Scheduled Successfully assigned kuttl-test-allowed-seagull/elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4 to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4 AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:55 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc688b9d8 to 1 deployment-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:56 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4.spec.containers{proxy} Created Created container 
proxy kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:29:56 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:05 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:11 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestallowedseagullsimpleprod-1-6dc6hcsf4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-g8snt Binding Scheduled Successfully assigned kuttl-test-allowed-seagull/simple-prod-collector-77fcbdc546-g8snt to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Warning Pod simple-prod-collector-77fcbdc546-g8snt FailedMount MountVolume.SetUp failed for volume "simple-prod-collector-tls-config-volume" : secret "simple-prod-collector-headless-tls" not found kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-g8snt AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-g8snt.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-g8snt.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-g8snt.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulCreate Created pod: simple-prod-collector-77fcbdc546-g8snt replicaset-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-77fcbdc546 to 1 deployment-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774 Binding Scheduled Successfully assigned kuttl-test-allowed-seagull/simple-prod-query-dccd47589-nn774 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Warning Pod simple-prod-query-dccd47589-nn774 FailedMount MountVolume.SetUp failed for volume "simple-prod-ui-oauth-proxy-tls" : secret "simple-prod-ui-oauth-proxy-tls" not found kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774 AddedInterface Add eth0 [10.131.0.75/23] from ovn-kubernetes logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 
+0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal ReplicaSet.apps simple-prod-query-dccd47589 SuccessfulCreate Created pod: simple-prod-query-dccd47589-nn774 replicaset-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:22 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-dccd47589 to 1 deployment-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:23 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:23 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:23 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:23 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:28 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67 Binding Scheduled Successfully assigned kuttl-test-allowed-seagull/simple-prod-query-76b78dfdf-8tm67 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:28 +0000 UTC Normal ReplicaSet.apps simple-prod-query-76b78dfdf SuccessfulCreate Created pod: simple-prod-query-76b78dfdf-8tm67 replicaset-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:28 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:28 +0000 UTC Normal Pod simple-prod-query-dccd47589-nn774.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:28 +0000 UTC Normal Pod 
simple-prod-query-dccd47589-nn774.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:28 +0000 UTC Normal ReplicaSet.apps simple-prod-query-dccd47589 SuccessfulDelete Deleted pod: simple-prod-query-dccd47589-nn774 replicaset-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:28 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-dccd47589 to 0 from 1 deployment-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:28 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-76b78dfdf to 1 deployment-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67 AddedInterface Add eth0 [10.131.0.76/23] from ovn-kubernetes logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:29 +0000 UTC Normal Pod simple-prod-query-76b78dfdf-8tm67.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod check-span-lk5w5 Binding Scheduled Successfully assigned kuttl-test-allowed-seagull/check-span-lk5w5 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod check-span-lk5w5 AddedInterface Add eth0 [10.129.2.53/23] from ovn-kubernetes 
logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod check-span-lk5w5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod check-span-lk5w5.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod check-span-lk5w5.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-lk5w5 job-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod report-span-lxz78 Binding Scheduled Successfully assigned kuttl-test-allowed-seagull/report-span-lxz78 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod report-span-lxz78 AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod report-span-lxz78.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod report-span-lxz78.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Pod report-span-lxz78.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:34 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-lxz78 job-controller logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:37 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:37 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:37 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:30:46 | examples-simple-prod | 2023-11-13 08:30:45 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:30:46 | examples-simple-prod | Deleting namespace: kuttl-test-allowed-seagull === CONT kuttl/harness/examples-business-application-injected-sidecar logger.go:42: 08:30:58 | 
examples-business-application-injected-sidecar | Creating namespace: kuttl-test-hardy-swan logger.go:42: 08:30:58 | examples-business-application-injected-sidecar/0-install | starting test step 0-install logger.go:42: 08:30:58 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-hardy-swan/myapp created logger.go:42: 08:30:58 | examples-business-application-injected-sidecar/0-install | test step completed 0-install logger.go:42: 08:30:58 | examples-business-application-injected-sidecar/1-install | starting test step 1-install logger.go:42: 08:30:58 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-hardy-swan/simplest created logger.go:42: 08:31:09 | examples-business-application-injected-sidecar/1-install | test step completed 1-install logger.go:42: 08:31:09 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:31:09 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null] logger.go:42: 08:31:11 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:31:20 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:31:21 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:31:21 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created logger.go:42: 08:31:21 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created logger.go:42: 08:31:32 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-hardy-swan: logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:58 +0000 UTC Normal Pod myapp-679f79d5f8-cvkpf Binding Scheduled Successfully assigned kuttl-test-hardy-swan/myapp-679f79d5f8-cvkpf to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:58 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulCreate Created pod: myapp-679f79d5f8-cvkpf replicaset-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:58 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-679f79d5f8 to 1 deployment-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:59 +0000 UTC 
Normal Pod myapp-5f76ff9685-v2b5q Binding Scheduled Successfully assigned kuttl-test-hardy-swan/myapp-5f76ff9685-v2b5q to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:59 +0000 UTC Warning Pod myapp-5f76ff9685-v2b5q FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:59 +0000 UTC Warning Pod myapp-5f76ff9685-v2b5q FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:59 +0000 UTC Normal ReplicaSet.apps myapp-5f76ff9685 SuccessfulCreate Created pod: myapp-5f76ff9685-v2b5q replicaset-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:59 +0000 UTC Normal Pod myapp-679f79d5f8-cvkpf AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:59 +0000 UTC Normal Pod myapp-679f79d5f8-cvkpf.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:30:59 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-5f76ff9685 to 1 deployment-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:03 +0000 UTC Normal Pod myapp-679f79d5f8-cvkpf.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 4.099s (4.099s including waiting) kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:03 +0000 UTC Normal Pod myapp-679f79d5f8-cvkpf.spec.containers{myapp} Created Created container myapp kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:03 +0000 UTC Normal Pod myapp-679f79d5f8-cvkpf.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:06 +0000 UTC Normal Pod simplest-c5d4d7d69-td599 Binding Scheduled Successfully assigned kuttl-test-hardy-swan/simplest-c5d4d7d69-td599 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:06 +0000 UTC Normal ReplicaSet.apps simplest-c5d4d7d69 SuccessfulCreate Created pod: simplest-c5d4d7d69-td599 replicaset-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:06 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-c5d4d7d69 to 1 deployment-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:07 +0000 UTC Normal Pod myapp-5f76ff9685-v2b5q AddedInterface Add eth0 [10.131.0.77/23] from ovn-kubernetes logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:07 +0000 UTC Normal Pod myapp-5f76ff9685-v2b5q.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:07 +0000 
UTC Warning Pod simplest-c5d4d7d69-td599 FailedMount MountVolume.SetUp failed for volume "simplest-ui-oauth-proxy-tls" : secret "simplest-ui-oauth-proxy-tls" not found kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:07 +0000 UTC Normal Pod simplest-c5d4d7d69-td599 AddedInterface Add eth0 [10.131.0.78/23] from ovn-kubernetes logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:07 +0000 UTC Normal Pod simplest-c5d4d7d69-td599.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:08 +0000 UTC Normal Pod simplest-c5d4d7d69-td599.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:08 +0000 UTC Normal Pod simplest-c5d4d7d69-td599.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:08 +0000 UTC Normal Pod simplest-c5d4d7d69-td599.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:08 +0000 UTC Normal Pod simplest-c5d4d7d69-td599.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:08 +0000 UTC Normal Pod simplest-c5d4d7d69-td599.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:10 +0000 UTC Warning Pod myapp-679f79d5f8-cvkpf.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.129.2.54:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:11 +0000 UTC Normal Pod myapp-5f76ff9685-v2b5q.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.938s (3.938s including waiting) kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:11 +0000 UTC Normal Pod myapp-5f76ff9685-v2b5q.spec.containers{myapp} Created Created container myapp kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:11 +0000 UTC Normal Pod myapp-5f76ff9685-v2b5q.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:11 +0000 UTC Normal Pod myapp-5f76ff9685-v2b5q.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:12 +0000 UTC Normal Pod myapp-5f76ff9685-v2b5q.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 
2023-11-13 08:31:12 +0000 UTC Normal Pod myapp-5f76ff9685-v2b5q.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:13 +0000 UTC Normal Pod myapp-679f79d5f8-cvkpf.spec.containers{myapp} Killing Stopping container myapp kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:13 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulDelete Deleted pod: myapp-679f79d5f8-cvkpf replicaset-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:13 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-679f79d5f8 to 0 from 1 deployment-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:16 +0000 UTC Normal Pod simplest-c5d4d7d69-td599.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:16 +0000 UTC Normal Pod simplest-c5d4d7d69-td599.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:16 +0000 UTC Normal ReplicaSet.apps simplest-c5d4d7d69 SuccessfulDelete Deleted pod: simplest-c5d4d7d69-td599 replicaset-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:16 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-c5d4d7d69 to 0 from 1 deployment-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:17 +0000 UTC Normal Pod simplest-7bb9c9fbfd-5vmpm Binding Scheduled Successfully assigned kuttl-test-hardy-swan/simplest-7bb9c9fbfd-5vmpm to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:17 +0000 UTC Normal Pod simplest-7bb9c9fbfd-5vmpm AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:17 +0000 UTC Normal Pod simplest-7bb9c9fbfd-5vmpm.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:17 +0000 UTC Normal ReplicaSet.apps simplest-7bb9c9fbfd SuccessfulCreate Created pod: simplest-7bb9c9fbfd-5vmpm replicaset-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:17 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-7bb9c9fbfd to 1 deployment-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:18 +0000 UTC Warning Pod myapp-5f76ff9685-v2b5q.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.131.0.77:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:19 +0000 UTC Normal Pod simplest-7bb9c9fbfd-5vmpm.spec.containers{jaeger} Pulled Successfully pulled image 
"registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" in 1.861s (1.861s including waiting) kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:19 +0000 UTC Normal Pod simplest-7bb9c9fbfd-5vmpm.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:19 +0000 UTC Normal Pod simplest-7bb9c9fbfd-5vmpm.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:19 +0000 UTC Normal Pod simplest-7bb9c9fbfd-5vmpm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:19 +0000 UTC Normal Pod simplest-7bb9c9fbfd-5vmpm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:19 +0000 UTC Normal Pod simplest-7bb9c9fbfd-5vmpm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:21 +0000 UTC Normal Pod check-span-w6nbt Binding Scheduled Successfully assigned kuttl-test-hardy-swan/check-span-w6nbt to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:21 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-w6nbt job-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:21 +0000 UTC Normal Pod report-span-2jsw5 Binding Scheduled Successfully assigned kuttl-test-hardy-swan/report-span-2jsw5 to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:21 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2jsw5 job-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:22 +0000 UTC Normal Pod check-span-w6nbt AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:22 +0000 UTC Normal Pod check-span-w6nbt.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:22 +0000 UTC Normal Pod check-span-w6nbt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:22 +0000 UTC Normal Pod check-span-w6nbt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:22 +0000 UTC Normal Pod report-span-2jsw5 AddedInterface Add eth0 [10.128.2.37/23] from ovn-kubernetes logger.go:42: 08:31:33 | examples-business-application-injected-sidecar 
| 2023-11-13 08:31:22 +0000 UTC Normal Pod report-span-2jsw5.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:22 +0000 UTC Normal Pod report-span-2jsw5.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:22 +0000 UTC Normal Pod report-span-2jsw5.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | 2023-11-13 08:31:32 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:31:33 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-hardy-swan === CONT kuttl/harness/examples-openshift-with-htpasswd logger.go:42: 08:31:39 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:31:39 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:31:39 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-workable-hawk logger.go:42: 08:31:39 | examples-openshift-with-htpasswd/0-install | starting test step 0-install logger.go:42: 08:31:39 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-workable-hawk/htpasswd created logger.go:42: 08:31:39 | examples-openshift-with-htpasswd/0-install | test step completed 0-install logger.go:42: 08:31:39 | examples-openshift-with-htpasswd/1-install | starting test step 1-install logger.go:42: 08:31:39 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-workable-hawk/with-htpasswd created logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/1-install | test step completed 1-install logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh] logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0 logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. 
Printing more information for debugging the template: logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | template was: logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | {.items[0].status.ingress[0].host} logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | object given to jsonpath engine was: logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | map[string]interface {}{"apiVersion":"v1", "items":XXinterface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | logger.go:42: 08:31:45 | examples-openshift-with-htpasswd/2-check-unsecured | logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1 logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-workable-hawk.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd] logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-workable-hawk.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-workable-hawk.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh] logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0 logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-workable-hawk.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd] logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-workable-hawk.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-workable-hawk.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh] logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0 logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-workable-hawk.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 08:31:55 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd] logger.go:42: 08:31:56 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response logger.go:42: 08:31:56 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift logger.go:42: 08:31:56 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication logger.go:42: 08:31:56 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-workable-hawk.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:31:56 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 08:31:56 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-workable-hawk.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:31:56 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly logger.go:42: 08:31:56 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-workable-hawk: logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal Pod with-htpasswd-676d767997-rrxz9 Binding Scheduled Successfully assigned kuttl-test-workable-hawk/with-htpasswd-676d767997-rrxz9 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal Pod with-htpasswd-676d767997-rrxz9 AddedInterface Add eth0 [10.129.2.56/23] from ovn-kubernetes logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal Pod with-htpasswd-676d767997-rrxz9.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal Pod with-htpasswd-676d767997-rrxz9.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal Pod with-htpasswd-676d767997-rrxz9.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal Pod with-htpasswd-676d767997-rrxz9.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal Pod with-htpasswd-676d767997-rrxz9.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal Pod with-htpasswd-676d767997-rrxz9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal ReplicaSet.apps with-htpasswd-676d767997 SuccessfulCreate Created pod: with-htpasswd-676d767997-rrxz9 replicaset-controller logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | 2023-11-13 08:31:43 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-676d767997 to 1 deployment-controller logger.go:42: 08:31:56 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-workable-hawk === CONT kuttl/harness/examples-openshift-agent-as-daemonset logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset | Creating namespace: kuttl-test-learning-molly logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset/0-install | starting test step 0-install logger.go:42: 
=== CONT kuttl/harness/examples-openshift-agent-as-daemonset
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset | Creating namespace: kuttl-test-learning-molly
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset/0-install | starting test step 0-install
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-learning-molly/jaeger-agent-daemonset created
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset/0-install | test step completed 0-install
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset/1-add-policy | starting test step 1-add-policy
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset]
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset"
logger.go:42: 08:32:02 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c sleep 5]
logger.go:42: 08:32:07 | examples-openshift-agent-as-daemonset/1-add-policy | test step completed 1-add-policy
logger.go:42: 08:32:07 | examples-openshift-agent-as-daemonset/2-install | starting test step 2-install
logger.go:42: 08:32:07 | examples-openshift-agent-as-daemonset/2-install | Jaeger:kuttl-test-learning-molly/agent-as-daemonset created
logger.go:42: 08:32:13 | examples-openshift-agent-as-daemonset/2-install | test step completed 2-install
logger.go:42: 08:32:13 | examples-openshift-agent-as-daemonset/3-install | starting test step 3-install
logger.go:42: 08:32:13 | examples-openshift-agent-as-daemonset/3-install | Deployment:kuttl-test-learning-molly/vertx-create-span-sidecar created
logger.go:42: 08:32:15 | examples-openshift-agent-as-daemonset/3-install | test step completed 3-install
logger.go:42: 08:32:15 | examples-openshift-agent-as-daemonset/4-find-service | starting test step 4-find-service
logger.go:42: 08:32:15 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 08:32:17 | examples-openshift-agent-as-daemonset/4-find-service | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:32:23 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_NAME=order ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JOB_NUMBER=00 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o find-service-00-job.yaml]
logger.go:42: 08:32:24 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c kubectl create -f find-service-00-job.yaml -n $NAMESPACE]
logger.go:42: 08:32:24 | examples-openshift-agent-as-daemonset/4-find-service | job.batch/00-find-service created
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset/4-find-service | test step completed 4-find-service
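The 4-find-service step renders a Job manifest from a gomplate template and submits it; kuttl then waits for the Job to succeed. A sketch of that render-and-run pattern using the invocation from the log (the template contents are not part of this log, and the closing kubectl wait is an assumption standing in for kuttl's own assert step):

  # ASSERT_IMG is assumed to be exported to the suite's assert image beforehand.
  SERVICE_NAME=order JOB_NUMBER=00 \
  JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query MOUNT_SECRET=e2e-test \
  /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o find-service-00-job.yaml
  kubectl create -f find-service-00-job.yaml -n $NAMESPACE
  kubectl wait --for=condition=complete job/00-find-service -n $NAMESPACE --timeout=120s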
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | examples-openshift-agent-as-daemonset events from ns kuttl-test-learning-molly:
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:10 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv Binding Scheduled Successfully assigned kuttl-test-learning-molly/agent-as-daemonset-68c4456dcc-zwddv to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:10 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-68c4456dcc SuccessfulCreate Created pod: agent-as-daemonset-68c4456dcc-zwddv replicaset-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:10 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-68c4456dcc to 1 deployment-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jcmqc Binding Scheduled Successfully assigned kuttl-test-learning-molly/agent-as-daemonset-agent-daemonset-jcmqc to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jcmqc AddedInterface Add eth0 [10.128.2.39/23] from ovn-kubernetes
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jcmqc.spec.containers{jaeger-agent-daemonset} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kj9r4 Binding Scheduled Successfully assigned kuttl-test-learning-molly/agent-as-daemonset-agent-daemonset-kj9r4 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kj9r4 AddedInterface Add eth0 [10.131.0.79/23] from ovn-kubernetes
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kj9r4.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kj9r4.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kj9r4.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kx7gh Binding Scheduled Successfully assigned kuttl-test-learning-molly/agent-as-daemonset-agent-daemonset-kx7gh to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kx7gh AddedInterface Add eth0 [10.129.2.58/23] from ovn-kubernetes
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kx7gh.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kx7gh.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-kx7gh.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-kx7gh daemonset-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-jcmqc daemonset-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:11 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-kj9r4 daemonset-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jcmqc.spec.containers{jaeger-agent-daemonset} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" in 1.897s (1.897s including waiting) kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jcmqc.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jcmqc.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:13 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-5drnw Binding Scheduled Successfully assigned kuttl-test-learning-molly/vertx-create-span-sidecar-6c569f6fc6-5drnw to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6c569f6fc6 SuccessfulCreate Created pod: vertx-create-span-sidecar-6c569f6fc6-5drnw replicaset-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6c569f6fc6 to 1 deployment-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:14 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-5drnw AddedInterface Add eth0 [10.131.0.80/23] from ovn-kubernetes
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:14 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-5drnw.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:14 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-5drnw.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:14 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-5drnw.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:18 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:18 +0000 UTC Normal Pod agent-as-daemonset-68c4456dcc-zwddv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:18 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-68c4456dcc SuccessfulDelete Deleted pod: agent-as-daemonset-68c4456dcc-zwddv replicaset-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:18 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-68c4456dcc to 0 from 1 deployment-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal Pod agent-as-daemonset-85f65b6b8-bdrkg Binding Scheduled Successfully assigned kuttl-test-learning-molly/agent-as-daemonset-85f65b6b8-bdrkg to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal Pod agent-as-daemonset-85f65b6b8-bdrkg AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal Pod agent-as-daemonset-85f65b6b8-bdrkg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal Pod agent-as-daemonset-85f65b6b8-bdrkg.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal Pod agent-as-daemonset-85f65b6b8-bdrkg.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal Pod agent-as-daemonset-85f65b6b8-bdrkg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal Pod agent-as-daemonset-85f65b6b8-bdrkg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal Pod agent-as-daemonset-85f65b6b8-bdrkg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-85f65b6b8 SuccessfulCreate Created pod: agent-as-daemonset-85f65b6b8-bdrkg replicaset-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:19 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-85f65b6b8 to 1 deployment-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:22 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-5drnw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.80:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:22 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-5drnw.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.80:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:24 +0000 UTC Normal Pod 00-find-service-6sjrz Binding Scheduled Successfully assigned kuttl-test-learning-molly/00-find-service-6sjrz to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:24 +0000 UTC Normal Pod 00-find-service-6sjrz AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:24 +0000 UTC Normal Pod 00-find-service-6sjrz.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:24 +0000 UTC Normal Pod 00-find-service-6sjrz.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:24 +0000 UTC Normal Pod 00-find-service-6sjrz.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:24 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-6sjrz job-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:24 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-5drnw.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:24 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-5drnw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.80:8080/": read tcp 10.131.0.2:39746->10.131.0.80:8080: read: connection reset by peer kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:24 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-5drnw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.80:8080/": dial tcp 10.131.0.80:8080: connect: connection refused kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:34 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-5drnw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.80:8080/": read tcp 10.131.0.2:56456->10.131.0.80:8080: read: connection reset by peer kubelet
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | 2023-11-13 08:32:35 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller
logger.go:42: 08:32:36 | examples-openshift-agent-as-daemonset | Deleting namespace: kuttl-test-learning-molly
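Steps 0-install and 1-add-policy at the top of this test create a dedicated SecurityContextConstraints object and grant it to the agent's ServiceAccount so the DaemonSet may bind host ports. The SCC manifest itself is not echoed into the log; a minimal sketch of what such an object could look like (only the SCC name and the oc adm policy command come from the log, every field value here is an assumption):

  kubectl apply -f - <<'EOF'
  apiVersion: security.openshift.io/v1
  kind: SecurityContextConstraints
  metadata:
    name: daemonset-with-hostport
  allowHostPorts: true        # assumed: the capability a hostPort agent DaemonSet needs
  runAsUser:
    type: RunAsAny
  seLinuxContext:
    type: RunAsAny
  EOF
  # Grant the SCC to the ServiceAccount, exactly as the 1-add-policy step does:
  oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset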
=== CONT kuttl/harness/examples-collector-with-priority-class
logger.go:42: 08:32:42 | examples-collector-with-priority-class | Creating namespace: kuttl-test-glad-bull
logger.go:42: 08:32:43 | examples-collector-with-priority-class/0-install | starting test step 0-install
logger.go:42: 08:32:43 | examples-collector-with-priority-class/0-install | PriorityClass:/collector-high-priority created
logger.go:42: 08:32:43 | examples-collector-with-priority-class/0-install | Jaeger:kuttl-test-glad-bull/collector-with-high-priority created
logger.go:42: 08:32:49 | examples-collector-with-priority-class/0-install | test step completed 0-install
logger.go:42: 08:32:49 | examples-collector-with-priority-class/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:32:49 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE collector-with-high-priority /dev/null]
logger.go:42: 08:32:50 | examples-collector-with-priority-class/1-smoke-test | Warning: resource jaegers/collector-with-high-priority is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:32:57 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:32:58 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:32:58 | examples-collector-with-priority-class/1-smoke-test | job.batch/report-span created
logger.go:42: 08:32:58 | examples-collector-with-priority-class/1-smoke-test | job.batch/check-span created
logger.go:42: 08:33:10 | examples-collector-with-priority-class/1-smoke-test | test step completed 1-smoke-test
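Each smoke test in this suite follows the same pattern: render smoke-test.yaml.template against the instance's collector and query endpoints, apply it, and let the report-span and check-span Jobs verify that a span can be written and read back. A condensed sketch with the endpoints from this test (the kubectl wait is an assumption standing in for kuttl's assert step):

  # ASSERT_IMG is assumed to be exported to the suite's assert image beforehand.
  JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 \
  JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 \
  MOUNT_SECRET=e2e-test \
  /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
  kubectl apply -f smoke-test-job.yaml -n $NAMESPACE
  kubectl wait --for=condition=complete job/report-span job/check-span -n $NAMESPACE --timeout=300s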
logger.go:42: 08:33:10 | examples-collector-with-priority-class | examples-collector-with-priority-class events from ns kuttl-test-glad-bull:
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:46 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb Binding Scheduled Successfully assigned kuttl-test-glad-bull/collector-with-high-priority-85bfc875f9-q8wtb to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:46 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-85bfc875f9 SuccessfulCreate Created pod: collector-with-high-priority-85bfc875f9-q8wtb replicaset-controller
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:46 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-85bfc875f9 to 1 deployment-controller
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:47 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb AddedInterface Add eth0 [10.131.0.81/23] from ovn-kubernetes
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:47 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:47 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:47 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:47 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:47 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:47 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:52 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:52 +0000 UTC Normal Pod collector-with-high-priority-85bfc875f9-q8wtb.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:52 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-85bfc875f9 SuccessfulDelete Deleted pod: collector-with-high-priority-85bfc875f9-q8wtb replicaset-controller
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:52 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled down replica set collector-with-high-priority-85bfc875f9 to 0 from 1 deployment-controller
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:53 +0000 UTC Normal Pod collector-with-high-priority-5ff7bcb464-nf4cn Binding Scheduled Successfully assigned kuttl-test-glad-bull/collector-with-high-priority-5ff7bcb464-nf4cn to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:53 +0000 UTC Normal Pod collector-with-high-priority-5ff7bcb464-nf4cn AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:53 +0000 UTC Normal Pod collector-with-high-priority-5ff7bcb464-nf4cn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:53 +0000 UTC Normal Pod collector-with-high-priority-5ff7bcb464-nf4cn.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:53 +0000 UTC Normal Pod collector-with-high-priority-5ff7bcb464-nf4cn.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:53 +0000 UTC Normal Pod collector-with-high-priority-5ff7bcb464-nf4cn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:53 +0000 UTC Normal Pod collector-with-high-priority-5ff7bcb464-nf4cn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:53 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-5ff7bcb464 SuccessfulCreate Created pod: collector-with-high-priority-5ff7bcb464-nf4cn replicaset-controller
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:53 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-5ff7bcb464 to 1 deployment-controller
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:54 +0000 UTC Normal Pod collector-with-high-priority-5ff7bcb464-nf4cn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:58 +0000 UTC Normal Pod check-span-b629b Binding Scheduled Successfully assigned kuttl-test-glad-bull/check-span-b629b to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:58 +0000 UTC Normal Pod check-span-b629b AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:58 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-b629b job-controller
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:58 +0000 UTC Normal Pod report-span-f6jww Binding Scheduled Successfully assigned kuttl-test-glad-bull/report-span-f6jww to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:58 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-f6jww job-controller
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:59 +0000 UTC Normal Pod check-span-b629b.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:59 +0000 UTC Normal Pod check-span-b629b.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:59 +0000 UTC Normal Pod check-span-b629b.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:59 +0000 UTC Normal Pod report-span-f6jww AddedInterface Add eth0 [10.131.0.82/23] from ovn-kubernetes
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:59 +0000 UTC Normal Pod report-span-f6jww.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:59 +0000 UTC Normal Pod report-span-f6jww.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:32:59 +0000 UTC Normal Pod report-span-f6jww.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:33:10 | examples-collector-with-priority-class | 2023-11-13 08:33:09 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:33:10 | examples-collector-with-priority-class | Deleting namespace: kuttl-test-glad-bull
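The 0-install step of this test creates a cluster-scoped PriorityClass and a Jaeger instance whose collector references it, which is why the PriorityClass is logged with an empty namespace (PriorityClass:/collector-high-priority). The manifest is not echoed into the log; a minimal sketch of a PriorityClass like the one created (the name comes from the log, value and description are assumptions):

  kubectl apply -f - <<'EOF'
  apiVersion: scheduling.k8s.io/v1
  kind: PriorityClass
  metadata:
    name: collector-high-priority
  value: 1000000              # assumed; any value above the default 0 raises scheduling priority
  globalDefault: false
  description: priority class for the Jaeger collector e2e example
  EOF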
=== CONT kuttl/harness/examples-all-in-one-with-options
logger.go:42: 08:33:22 | examples-all-in-one-with-options | Creating namespace: kuttl-test-genuine-prawn
logger.go:42: 08:33:22 | examples-all-in-one-with-options/0-install | starting test step 0-install
logger.go:42: 08:33:22 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-genuine-prawn/my-jaeger created
logger.go:42: 08:33:30 | examples-all-in-one-with-options/0-install | test step completed 0-install
logger.go:42: 08:33:30 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:33:30 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:33:31 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:33:38 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:33:38 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:33:39 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created
logger.go:42: 08:33:39 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created
logger.go:42: 08:33:50 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:33:50 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-genuine-prawn:
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:26 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw Binding Scheduled Successfully assigned kuttl-test-genuine-prawn/my-jaeger-f7476dd48-wndnw to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:26 +0000 UTC Warning Pod my-jaeger-f7476dd48-wndnw FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : secret "my-jaeger-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:26 +0000 UTC Warning Pod my-jaeger-f7476dd48-wndnw FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-f7476dd48 SuccessfulCreate Created pod: my-jaeger-f7476dd48-wndnw replicaset-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:26 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-f7476dd48 to 1 deployment-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:27 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw AddedInterface Add eth0 [10.129.2.61/23] from ovn-kubernetes
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:27 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:27 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:27 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:27 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:27 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:27 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:35 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:35 +0000 UTC Normal Pod my-jaeger-f7476dd48-wndnw.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:35 +0000 UTC Normal ReplicaSet.apps my-jaeger-f7476dd48 SuccessfulDelete Deleted pod: my-jaeger-f7476dd48-wndnw replicaset-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:35 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-f7476dd48 to 0 from 1 deployment-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal Pod my-jaeger-6f95699d87-x8cxg Binding Scheduled Successfully assigned kuttl-test-genuine-prawn/my-jaeger-6f95699d87-x8cxg to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal Pod my-jaeger-6f95699d87-x8cxg AddedInterface Add eth0 [10.131.0.83/23] from ovn-kubernetes
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal Pod my-jaeger-6f95699d87-x8cxg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal Pod my-jaeger-6f95699d87-x8cxg.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal Pod my-jaeger-6f95699d87-x8cxg.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal Pod my-jaeger-6f95699d87-x8cxg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal Pod my-jaeger-6f95699d87-x8cxg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal Pod my-jaeger-6f95699d87-x8cxg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-6f95699d87 SuccessfulCreate Created pod: my-jaeger-6f95699d87-x8cxg replicaset-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:36 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6f95699d87 to 1 deployment-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:37 +0000 UTC Warning Endpoints my-jaeger-agent FailedToUpdateEndpoint Failed to update endpoint kuttl-test-genuine-prawn/my-jaeger-agent: Operation cannot be fulfilled on endpoints "my-jaeger-agent": the object has been modified; please apply your changes to the latest version and try again endpoint-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:37 +0000 UTC Warning Endpoints my-jaeger-query FailedToUpdateEndpoint Failed to update endpoint kuttl-test-genuine-prawn/my-jaeger-query: Operation cannot be fulfilled on endpoints "my-jaeger-query": the object has been modified; please apply your changes to the latest version and try again endpoint-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod check-span-xwnmw Binding Scheduled Successfully assigned kuttl-test-genuine-prawn/check-span-xwnmw to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod check-span-xwnmw AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod check-span-xwnmw.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod check-span-xwnmw.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod check-span-xwnmw.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-xwnmw job-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod report-span-2fmz9 Binding Scheduled Successfully assigned kuttl-test-genuine-prawn/report-span-2fmz9 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod report-span-2fmz9 AddedInterface Add eth0 [10.129.2.62/23] from ovn-kubernetes
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod report-span-2fmz9.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod report-span-2fmz9.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Pod report-span-2fmz9.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:39 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2fmz9 job-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | 2023-11-13 08:33:50 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:33:50 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-genuine-prawn
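Note the query endpoint used by this smoke test, https://my-jaeger-query:443/jaeger: the UI is served under a non-root base path, which is the kind of option the all-in-one-with-options example configures. A sketch of an allInOne Jaeger CR carrying such an option (the CR is not shown in this log, so the option name follows the upstream example and should be treated as an assumption):

  kubectl apply -n $NAMESPACE -f - <<'EOF'
  apiVersion: jaegertracing.io/v1
  kind: Jaeger
  metadata:
    name: my-jaeger
  spec:
    strategy: allInOne
    allInOne:
      options:
        query:
          base-path: /jaeger  # matches the /jaeger suffix on JAEGER_QUERY_ENDPOINT above
  EOF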
logger.go:42: 08:38:40 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:38:41 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:38:41 | examples-auto-provision-kafka/6-smoke-test | job.batch/report-span created logger.go:42: 08:38:41 | examples-auto-provision-kafka/6-smoke-test | job.batch/check-span created logger.go:42: 08:38:54 | examples-auto-provision-kafka/6-smoke-test | test step completed 6-smoke-test logger.go:42: 08:38:54 | examples-auto-provision-kafka | examples-auto-provision-kafka events from ns kuttl-test-accurate-lab: logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:03 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-57597854d6 to 1 deployment-controller logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:04 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-57597854d6 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w replicaset-controller logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w Binding Scheduled Successfully assigned kuttl-test-accurate-lab/elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:38:54 | 
examples-auto-provision-kafka | 2023-11-13 08:34:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:14 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:19 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestaccuratelabautoprovisionkafk-1-dj76w.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:32 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-zookeeper NoPods No matching pods found controllermanager logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:32 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:32 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:34:32 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-accurate-lab/data-auto-provision-kafka-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_a95d5e43-dac0-4be3-9233-4b6d6f41a8b3
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:36:55 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-accurate-lab/data-auto-provision-kafka-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_b3413021-d802-4087-93bb-eb7ae36e5561
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:36:58 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-106ecc40-3229-4618-b833-168ed2d6bd1e ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_b3413021-d802-4087-93bb-eb7ae36e5561
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:36:59 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-accurate-lab/auto-provision-kafka-zookeeper-0 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:01 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-106ecc40-3229-4618-b833-168ed2d6bd1e" attachdetach-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:07 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 AddedInterface Add eth0 [10.129.2.63/23] from ovn-kubernetes
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:08 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:08 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:08 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:29 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-kafka NoPods No matching pods found controllermanager
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:29 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:29 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:29 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-accurate-lab/data-0-auto-provision-kafka-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_b3413021-d802-4087-93bb-eb7ae36e5561
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:32 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-3e5e078b-c460-45ec-aa90-c5c7f9b7d0d7 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_b3413021-d802-4087-93bb-eb7ae36e5561
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:33 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 Binding Scheduled Successfully assigned kuttl-test-accurate-lab/auto-provision-kafka-kafka-0 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:35 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3e5e078b-c460-45ec-aa90-c5c7f9b7d0d7" attachdetach-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:42 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 AddedInterface Add eth0 [10.131.0.84/23] from ovn-kubernetes
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:43 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:43 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:37:43 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj Binding Scheduled Successfully assigned kuttl-test-accurate-lab/auto-provision-kafka-entity-operator-8694db4d75-2d7pj to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj AddedInterface Add eth0 [10.131.0.85/23] from ovn-kubernetes
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:03 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-entity-operator-8694db4d75 SuccessfulCreate Created pod: auto-provision-kafka-entity-operator-8694db4d75-2d7pj replicaset-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:03 +0000 UTC Normal Deployment.apps auto-provision-kafka-entity-operator ScalingReplicaSet Scaled up replica set auto-provision-kafka-entity-operator-8694db4d75 to 1 deployment-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:04 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:04 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:04 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:04 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:04 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:04 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:04 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:04 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-8694db4d75-2d7pj.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:26 +0000 UTC Normal Pod auto-provision-kafka-collector-59b5996646-2bz8b Binding Scheduled Successfully assigned kuttl-test-accurate-lab/auto-provision-kafka-collector-59b5996646-2bz8b to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:26 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-collector-59b5996646 SuccessfulCreate Created pod: auto-provision-kafka-collector-59b5996646-2bz8b replicaset-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:26 +0000 UTC Normal Deployment.apps auto-provision-kafka-collector ScalingReplicaSet Scaled up replica set auto-provision-kafka-collector-59b5996646 to 1 deployment-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Warning Pod auto-provision-kafka-collector-59b5996646-2bz8b FailedMount MountVolume.SetUp failed for volume "auto-provision-kafka-collector-tls-config-volume" : secret "auto-provision-kafka-collector-headless-tls" not found kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-collector-59b5996646-2bz8b AddedInterface Add eth0 [10.129.2.64/23] from ovn-kubernetes
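The PersistentVolumeClaim events above trace the WaitForFirstConsumer flow: the claim sits unbound until the Zookeeper or Kafka pod is scheduled, then the ebs.csi.aws.com external provisioner creates the volume and the attachdetach-controller attaches it. A hedged sketch for inspecting that flow on a live cluster (namespace and claim names taken from the log; the kubectl invocations are standard):

# Why is the claim still Pending? WaitForFirstConsumer delays binding until a pod needs it.
kubectl -n kuttl-test-accurate-lab describe pvc data-0-auto-provision-kafka-kafka-0

# Events from the persistentvolume-controller and the CSI external provisioner, oldest first
kubectl -n kuttl-test-accurate-lab get events \
  --field-selector involvedObject.name=data-0-auto-provision-kafka-kafka-0 \
  --sort-by=.lastTimestamp

# Confirm the StorageClass delays binding rather than provisioning immediately
kubectl get storageclass -o custom-columns=NAME:.metadata.name,MODE:.volumeBindingMode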
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-ingester-646459cc9d-mb4gb Binding Scheduled Successfully assigned kuttl-test-accurate-lab/auto-provision-kafka-ingester-646459cc9d-mb4gb to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-ingester-646459cc9d-mb4gb AddedInterface Add eth0 [10.129.2.65/23] from ovn-kubernetes
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-ingester-646459cc9d-mb4gb.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-ingester-646459cc9d SuccessfulCreate Created pod: auto-provision-kafka-ingester-646459cc9d-mb4gb replicaset-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Deployment.apps auto-provision-kafka-ingester ScalingReplicaSet Scaled up replica set auto-provision-kafka-ingester-646459cc9d to 1 deployment-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx Binding Scheduled Successfully assigned kuttl-test-accurate-lab/auto-provision-kafka-query-b9d5dc4b5-ktzfx to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx AddedInterface Add eth0 [10.129.2.66/23] from ovn-kubernetes
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-b9d5dc4b5 SuccessfulCreate Created pod: auto-provision-kafka-query-b9d5dc4b5-ktzfx replicaset-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:27 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-b9d5dc4b5 to 1 deployment-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:28 +0000 UTC Normal Pod auto-provision-kafka-collector-59b5996646-2bz8b.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:28 +0000 UTC Normal Pod auto-provision-kafka-collector-59b5996646-2bz8b.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:28 +0000 UTC Normal Pod auto-provision-kafka-collector-59b5996646-2bz8b.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:30 +0000 UTC Normal Pod auto-provision-kafka-ingester-646459cc9d-mb4gb.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" in 2.952s (2.952s including waiting) kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:30 +0000 UTC Normal Pod auto-provision-kafka-ingester-646459cc9d-mb4gb.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:30 +0000 UTC Normal Pod auto-provision-kafka-ingester-646459cc9d-mb4gb.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:38 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f Binding Scheduled Successfully assigned kuttl-test-accurate-lab/auto-provision-kafka-query-56d89c7499-2lv6f to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:38 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-56d89c7499 SuccessfulCreate Created pod: auto-provision-kafka-query-56d89c7499-2lv6f replicaset-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:38 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:38 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:38 +0000 UTC Normal Pod auto-provision-kafka-query-b9d5dc4b5-ktzfx.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:38 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-b9d5dc4b5 SuccessfulDelete Deleted pod: auto-provision-kafka-query-b9d5dc4b5-ktzfx replicaset-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:38 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled down replica set auto-provision-kafka-query-b9d5dc4b5 to 0 from 1 deployment-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:38 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-56d89c7499 to 1 deployment-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f AddedInterface Add eth0 [10.129.2.67/23] from ovn-kubernetes
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:39 +0000 UTC Normal Pod auto-provision-kafka-query-56d89c7499-2lv6f.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:41 +0000 UTC Normal Pod check-span-6zz6k Binding Scheduled Successfully assigned kuttl-test-accurate-lab/check-span-6zz6k to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:41 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-6zz6k job-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:41 +0000 UTC Normal Pod report-span-2p92z Binding Scheduled Successfully assigned kuttl-test-accurate-lab/report-span-2p92z to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:41 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2p92z job-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:42 +0000 UTC Normal Pod check-span-6zz6k AddedInterface Add eth0 [10.131.0.87/23] from ovn-kubernetes
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:42 +0000 UTC Normal Pod check-span-6zz6k.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:42 +0000 UTC Normal Pod check-span-6zz6k.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:42 +0000 UTC Normal Pod check-span-6zz6k.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:42 +0000 UTC Normal Pod report-span-2p92z AddedInterface Add eth0 [10.131.0.86/23] from ovn-kubernetes
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:42 +0000 UTC Normal Pod report-span-2p92z.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:42 +0000 UTC Normal Pod report-span-2p92z.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:42 +0000 UTC Normal Pod report-span-2p92z.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
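The FailedGetResourceMetric and FailedComputeMetricsReplicas warnings above are the horizontal-pod-autoscaler failing to read CPU and memory usage for pods that started only seconds earlier; until the metrics pipeline has samples for them, the HPA cannot compute a replica count. A hedged checklist for confirming the metrics pipeline is the cause (standard commands; the HPA name comes from the log):

# Is the resource metrics API registered and serving?
kubectl get apiservice v1beta1.metrics.k8s.io

# Do the pods report usage yet? Empty or missing rows right after startup are expected.
kubectl -n kuttl-test-accurate-lab top pods

# The HPA's own view: its conditions explain which metric failed and why
kubectl -n kuttl-test-accurate-lab describe hpa auto-provision-kafka-collector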
logger.go:42: 08:38:54 | examples-auto-provision-kafka | 2023-11-13 08:38:53 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:38:54 | examples-auto-provision-kafka | Deleting namespace: kuttl-test-accurate-lab
=== CONT kuttl/harness/examples-agent-with-priority-class
logger.go:42: 08:39:28 | examples-agent-with-priority-class | Creating namespace: kuttl-test-dear-loon
logger.go:42: 08:39:28 | examples-agent-with-priority-class/0-install | starting test step 0-install
logger.go:42: 08:39:28 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 08:39:28 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-dear-loon/jaeger-agent-daemonset created
logger.go:42: 08:39:28 | examples-agent-with-priority-class/0-install | test step completed 0-install
logger.go:42: 08:39:28 | examples-agent-with-priority-class/1-install | starting test step 1-install
logger.go:42: 08:39:29 | examples-agent-with-priority-class/1-install | PriorityClass:/high-priority created
logger.go:42: 08:39:29 | examples-agent-with-priority-class/1-install | Jaeger:kuttl-test-dear-loon/agent-as-daemonset created
logger.go:42: 08:39:35 | examples-agent-with-priority-class/1-install | test step completed 1-install
logger.go:42: 08:39:35 | examples-agent-with-priority-class/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:39:35 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 08:39:36 | examples-agent-with-priority-class/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:39:43 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:39:43 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:39:44 | examples-agent-with-priority-class/2-smoke-test | job.batch/report-span created
logger.go:42: 08:39:44 | examples-agent-with-priority-class/2-smoke-test | job.batch/check-span created
logger.go:42: 08:39:55 | examples-agent-with-priority-class/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 08:39:55 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns kuttl-test-dear-loon:
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:32 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5 Binding Scheduled Successfully assigned kuttl-test-dear-loon/agent-as-daemonset-6dff6d5f8f-npvf5 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:32 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-6dff6d5f8f SuccessfulCreate Created pod: agent-as-daemonset-6dff6d5f8f-npvf5 replicaset-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:32 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-6dff6d5f8f to 1 deployment-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:33 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5 AddedInterface Add eth0 [10.131.0.88/23] from ovn-kubernetes
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:33 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:33 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:33 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:33 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:33 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:33 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:34 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:38 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:38 +0000 UTC Normal Pod agent-as-daemonset-6dff6d5f8f-npvf5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:38 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-6dff6d5f8f SuccessfulDelete Deleted pod: agent-as-daemonset-6dff6d5f8f-npvf5 replicaset-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:38 +0000 UTC Normal Pod agent-as-daemonset-7d69bf5c7f-4l75j Binding Scheduled Successfully assigned kuttl-test-dear-loon/agent-as-daemonset-7d69bf5c7f-4l75j to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:38 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-7d69bf5c7f SuccessfulCreate Created pod: agent-as-daemonset-7d69bf5c7f-4l75j replicaset-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:38 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-6dff6d5f8f to 0 from 1 deployment-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:38 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-7d69bf5c7f to 1 deployment-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:39 +0000 UTC Normal Pod agent-as-daemonset-7d69bf5c7f-4l75j AddedInterface Add eth0 [10.131.0.89/23] from ovn-kubernetes
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:39 +0000 UTC Normal Pod agent-as-daemonset-7d69bf5c7f-4l75j.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:39 +0000 UTC Normal Pod agent-as-daemonset-7d69bf5c7f-4l75j.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:39 +0000 UTC Normal Pod agent-as-daemonset-7d69bf5c7f-4l75j.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:39 +0000 UTC Normal Pod agent-as-daemonset-7d69bf5c7f-4l75j.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:39 +0000 UTC Normal Pod agent-as-daemonset-7d69bf5c7f-4l75j.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:39 +0000 UTC Normal Pod agent-as-daemonset-7d69bf5c7f-4l75j.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:40 +0000 UTC Warning Pod agent-as-daemonset-7d69bf5c7f-4l75j FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-service-ca" : configmap references non-existent config key: service-ca.crt kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod check-span-wsn9k Binding Scheduled Successfully assigned kuttl-test-dear-loon/check-span-wsn9k to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod check-span-wsn9k AddedInterface Add eth0 [10.129.2.69/23] from ovn-kubernetes
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod check-span-wsn9k.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod check-span-wsn9k.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod check-span-wsn9k.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-wsn9k job-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod report-span-mpztx Binding Scheduled Successfully assigned kuttl-test-dear-loon/report-span-mpztx to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod report-span-mpztx AddedInterface Add eth0 [10.129.2.68/23] from ovn-kubernetes
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod report-span-mpztx.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod report-span-mpztx.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Pod report-span-mpztx.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:44 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-mpztx job-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | 2023-11-13 08:39:55 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:39:55 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-dear-loon
=== CONT kuttl/harness/examples-agent-as-daemonset
logger.go:42: 08:40:07 | examples-agent-as-daemonset | Creating namespace: kuttl-test-liberal-hare
logger.go:42: 08:40:07 | examples-agent-as-daemonset/0-install | starting test step 0-install
logger.go:42: 08:40:07 | examples-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 08:40:07 | examples-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-liberal-hare/jaeger-agent-daemonset created
logger.go:42: 08:40:07 | examples-agent-as-daemonset/0-install | test step completed 0-install
logger.go:42: 08:40:07 | examples-agent-as-daemonset/1-install | starting test step 1-install
logger.go:42: 08:40:07 | examples-agent-as-daemonset/1-install | Jaeger:kuttl-test-liberal-hare/agent-as-daemonset created
logger.go:42: 08:40:12 | examples-agent-as-daemonset/1-install | test step completed 1-install
logger.go:42: 08:40:12 | examples-agent-as-daemonset/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:40:12 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 08:40:14 | examples-agent-as-daemonset/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
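Both daemonset examples hit the same admission rejection seen in the FailedCreate events above: the agent pod requests hostPorts 5775, 5778, 6831, 6832 and 14271, every SCC the pod's service account can use (including restricted-v2) forbids host ports, and even the test-created daemonset-with-hostport SCC is reported as "not usable by user or serviceaccount". One plausible remediation, sketched with names from the log, is to bind that SCC to the service account the DaemonSet pods actually run under:

# Allow the service account to use the host-port SCC (-z targets a service account in -n's namespace)
oc adm policy add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset -n kuttl-test-liberal-hare

# After the DaemonSet controller retries, check which SCC admitted the pods
oc -n kuttl-test-liberal-hare get pods -o yaml | grep 'openshift.io/scc'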
logger.go:42: 08:40:20 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:40:21 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:40:21 | examples-agent-as-daemonset/2-smoke-test | job.batch/report-span created
logger.go:42: 08:40:21 | examples-agent-as-daemonset/2-smoke-test | job.batch/check-span created
logger.go:42: 08:40:33 | examples-agent-as-daemonset/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 08:40:33 | examples-agent-as-daemonset | examples-agent-as-daemonset events from ns kuttl-test-liberal-hare:
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:10 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm Binding Scheduled Successfully assigned kuttl-test-liberal-hare/agent-as-daemonset-7b5d6d7458-fw6zm to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:10 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-7b5d6d7458 SuccessfulCreate Created pod: agent-as-daemonset-7b5d6d7458-fw6zm replicaset-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:10 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:10 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-7b5d6d7458 to 1 deployment-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:11 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm AddedInterface Add eth0 [10.129.2.70/23] from ovn-kubernetes
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:11 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:11 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:11 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:11 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:11 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:11 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:15 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:15 +0000 UTC Normal Pod agent-as-daemonset-7b5d6d7458-fw6zm.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:15 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-7b5d6d7458 SuccessfulDelete Deleted pod: agent-as-daemonset-7b5d6d7458-fw6zm replicaset-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:15 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-7b5d6d7458 to 0 from 1 deployment-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal Pod agent-as-daemonset-5b567b8955-t7vp4 Binding Scheduled Successfully assigned kuttl-test-liberal-hare/agent-as-daemonset-5b567b8955-t7vp4 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal Pod agent-as-daemonset-5b567b8955-t7vp4 AddedInterface Add eth0 [10.129.2.71/23] from ovn-kubernetes
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal Pod agent-as-daemonset-5b567b8955-t7vp4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal Pod agent-as-daemonset-5b567b8955-t7vp4.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal Pod agent-as-daemonset-5b567b8955-t7vp4.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal Pod agent-as-daemonset-5b567b8955-t7vp4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal Pod agent-as-daemonset-5b567b8955-t7vp4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal Pod agent-as-daemonset-5b567b8955-t7vp4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5b567b8955 SuccessfulCreate Created pod: agent-as-daemonset-5b567b8955-t7vp4 replicaset-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:16 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-5b567b8955 to 1 deployment-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:21 +0000 UTC Normal Pod check-span-mvr7b Binding Scheduled Successfully assigned kuttl-test-liberal-hare/check-span-mvr7b to ip-10-0-91-77.us-east-2.compute.internal default-scheduler
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:21 +0000 UTC Normal Pod check-span-mvr7b AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:21 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-mvr7b job-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:21 +0000 UTC Normal Pod report-span-tzblj Binding Scheduled Successfully assigned kuttl-test-liberal-hare/report-span-tzblj to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:21 +0000 UTC Normal Pod report-span-tzblj AddedInterface Add eth0 [10.131.0.90/23] from ovn-kubernetes
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:21 +0000 UTC Normal Pod report-span-tzblj.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:21 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-tzblj job-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:22 +0000 UTC Normal Pod check-span-mvr7b.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:22 +0000 UTC Normal Pod check-span-mvr7b.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:22 +0000 UTC Normal Pod check-span-mvr7b.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:22 +0000 UTC Normal Pod report-span-tzblj.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:22 +0000 UTC Normal Pod report-span-tzblj.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:40:33 | examples-agent-as-daemonset | 2023-11-13 08:40:33 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:40:33 | examples-agent-as-daemonset | Deleting namespace: kuttl-test-liberal-hare
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1707.22s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.81s)
        --- PASS: kuttl/harness/examples-service-types (52.61s)
        --- PASS: kuttl/harness/examples-with-sampling (306.00s)
        --- PASS: kuttl/harness/examples-with-cassandra (96.48s)
        --- FAIL: kuttl/harness/examples-with-badger-and-volume (443.64s)
        --- PASS: kuttl/harness/examples-with-badger (39.53s)
        --- PASS: kuttl/harness/examples-simplest (37.98s)
        --- PASS: kuttl/harness/examples-simple-prod-with-volumes (68.93s)
        --- PASS: kuttl/harness/examples-simple-prod (69.06s)
        --- PASS: kuttl/harness/examples-business-application-injected-sidecar (41.21s)
        --- PASS: kuttl/harness/examples-openshift-with-htpasswd (22.42s)
        --- PASS: kuttl/harness/examples-openshift-agent-as-daemonset (40.94s)
        --- PASS: kuttl/harness/examples-collector-with-priority-class (39.78s)
        --- PASS: kuttl/harness/examples-all-in-one-with-options (33.92s)
        --- PASS: kuttl/harness/examples-auto-provision-kafka (332.11s)
        --- PASS: kuttl/harness/examples-agent-with-priority-class (38.40s)
        --- PASS: kuttl/harness/examples-agent-as-daemonset (38.23s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml
time="2023-11-13T08:40:45Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-13T08:40:45Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-13T08:40:45Z" level=debug msg="normalizing test case names"
time="2023-11-13T08:40:45Z" level=debug msg="examples/artifacts -> examples_artifacts"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-with-badger-and-volume -> examples_examples_with_badger_and_volume"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-openshift-agent-as-daemonset -> examples_examples_openshift_agent_as_daemonset"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-collector-with-priority-class -> examples_examples_collector_with_priority_class"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-auto-provision-kafka -> examples_examples_auto_provision_kafka"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class"
time="2023-11-13T08:40:45Z" level=debug msg="examples/examples-agent-as-daemonset -> examples_examples_agent_as_daemonset"
+---------------------------------------------------------+--------+
|                          NAME                           | RESULT |
+---------------------------------------------------------+--------+
| examples_artifacts                                      | passed |
| examples_examples_service_types                         | passed |
| examples_examples_with_sampling                         | passed |
| examples_examples_with_cassandra                        | passed |
| examples_examples_with_badger_and_volume                | failed |
| examples_examples_with_badger                           | passed |
| examples_examples_simplest                              | passed |
| examples_examples_simple_prod_with_volumes              | passed |
| examples_examples_simple_prod                           | passed |
| examples_examples_business_application_injected_sidecar | passed |
| examples_examples_openshift_with_htpasswd               | passed |
| examples_examples_openshift_agent_as_daemonset          | passed |
| examples_examples_collector_with_priority_class         | passed |
| examples_examples_all_in_one_with_options               | passed |
| examples_examples_auto_provision_kafka                  | passed |
| examples_examples_agent_with_priority_class             | passed |
| examples_examples_agent_as_daemonset                    | passed |
+---------------------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ '[' 1 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=generate
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
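The shell trace above also shows how the harness decides the overall verdict: after each suite, junitcli rewrites the kuttl report into /logs/artifacts/<suite>.xml, and every report gathered so far is grepped for 'failure message'; the run only aborts once the tally crosses a threshold of 3, which is why the failed examples_examples_with_badger_and_volume test still ends in exit 0. A condensed sketch of that accounting, reconstructed from the trace (the real script may accumulate the counts slightly differently):

# Count suite reports whose JUnit output contains at least one failure
count=0
for file in "$ARTIFACT_DIR"/*; do
  if [ "$(grep -c 'failure message' "$file")" -gt 0 ]; then
    count=$((count + 1))
  fi
done

# Tolerate a few failing suites; only a larger pile-up fails the CI job
if [ "$count" -gt 3 ]; then
  exit 1
fi
exit 0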
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/generate.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-generate
make[2]: Entering directory '/tmp/jaeger-tests'
test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.27.0/operator-sdk_`go env GOOS`_`go env GOARCH`
./hack/install/install-golangci-lint.sh
Installing golangci-lint
golangci-lint 1.53.2 is installed already
./hack/install/install-goimports.sh
Installing goimports
Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12
>>>> Formatting code...
./.ci/format.sh
>>>> Building...
./hack/install/install-dependencies.sh
Installing go dependencies
Try 0... go mod download
GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.49.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2023-11-13T08:40:47Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.49.0"" -o "bin/jaeger-operator" main.go
JAEGER_VERSION="1.49.0" ./tests/e2e/generate/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-11-08-062604 True False 52m Cluster version is 4.15.0-0.nightly-2023-11-08-062604'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-11-08-062604 True False 52m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/generate/render.sh
++ export SUITE_DIR=./tests/e2e/generate
++ SUITE_DIR=./tests/e2e/generate
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/generate
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ '[' true = true ']'
+ skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed'
+ '[' 2 -ne 2 ']'
+ test_name=generate
+ message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/generate/_build
+ '[' _build '!=' _build ']'
+ rm -rf generate
+ warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m'
WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running generate E2E tests'
Running generate E2E tests
+ cd tests/e2e/generate/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-894131460
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
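Each render.sh finishes by rendering kuttl-test.yaml from the shared template and creating an artifacts directory, and the harness above then reports a 420-second per-step timeout. A minimal sketch of what the rendered file plausibly contains, assuming stock kuttl TestSuite fields; the authoritative content comes from tests/templates/kuttl-test.yaml.template:

cat > kuttl-test.yaml <<'EOF'
apiVersion: kuttl.dev/v1beta1
kind: TestSuite
timeout: 420        # matches the per-step timeout in the harness output above
testDirs:
  - .               # the rendered test cases in this _build directory
artifactsDir: artifacts
EOF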
has 1 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT kuttl/harness/artifacts
logger.go:42: 08:41:09 | artifacts | Creating namespace: kuttl-test-healthy-titmouse
logger.go:42: 08:41:09 | artifacts | artifacts events from ns kuttl-test-healthy-titmouse:
logger.go:42: 08:41:09 | artifacts | Deleting namespace: kuttl-test-healthy-titmouse
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (5.97s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.81s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml
time="2023-11-13T08:41:15Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-13T08:41:15Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-13T08:41:15Z" level=debug msg="normalizing test case names"
time="2023-11-13T08:41:15Z" level=debug msg="generate/artifacts -> generate_artifacts"
+--------------------+--------+
|        NAME        | RESULT |
+--------------------+--------+
| generate_artifacts | passed |
+--------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ '[' 1 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=miscellaneous
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/miscellaneous.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-miscellaneous
make[2]: Entering directory '/tmp/jaeger-tests'
SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 52m Cluster version is 4.15.0-0.nightly-2023-11-08-062604'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 52m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/miscellaneous/render.sh ++ export SUITE_DIR=./tests/e2e/miscellaneous ++ SUITE_DIR=./tests/e2e/miscellaneous ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test cassandra-spark 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=cassandra-spark + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf cassandra-spark + warning 'cassandra-spark: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: cassandra-spark: Test not supported in OpenShift\e[0m' WAR: cassandra-spark: Test not supported in OpenShift + start_test collector-autoscale + '[' 1 -ne 1 ']' + test_name=collector-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m' Rendering files for test collector-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p collector-autoscale + cd collector-autoscale + jaeger_name=simple-prod + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + ELASTICSEARCH_NODECOUNT=1 + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml + kubectl api-versions + grep autoscaling/v2beta2 -q + rm ./04-assert.yaml + generate_otlp_e2e_tests http + test_protocol=http + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m' Rendering files for test collector-otlp-allinone-http + echo 
=========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale + '[' collector-autoscale '!=' _build ']' + cd .. + mkdir -p collector-otlp-allinone-http + cd collector-otlp-allinone-http + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger http true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m' Rendering files for test collector-otlp-production-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http + '[' collector-otlp-allinone-http '!=' _build ']' + cd .. 
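Note: the in-place patching performed by the collector-autoscale render above uses yq v4 exactly as traced. Collected in one place for reference; the shape of the final conditional is an inference (autoscaling/v2beta2 is absent on this 4.15 cluster, and the trace shows 04-assert.yaml being removed):

# Patch the rendered Jaeger CR for the autoscale scenario (commands verbatim from the trace).
yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml
yq e -i '.spec.collector.autoscale=true' 01-install.yaml
yq e -i '.spec.collector.minReplicas=1' 01-install.yaml
yq e -i '.spec.collector.maxReplicas=2' 01-install.yaml
# Drop the v2beta2-specific assert step when that API group is unavailable
# (assumed conditional polarity; the trace only shows the grep running and the rm firing).
if ! kubectl api-versions | grep -q autoscaling/v2beta2; then
    rm ./04-assert.yaml
fi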
+ mkdir -p collector-otlp-production-http + cd collector-otlp-production-http + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger http true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + generate_otlp_e2e_tests grpc + test_protocol=grpc + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m' Rendering files for test collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http + '[' collector-otlp-production-http '!=' _build ']' + cd .. 
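Note: the endpoint wiring repeated by each render_otlp_smoke_test call condenses to a few lines. A minimal sketch reconstructed from the traces; the function name and the unsecured defaults (plain HTTP, query port 16686) are assumptions, while the secured values and the OTLP ports match the log:

# Hypothetical condensation of the endpoint selection seen in the traces above.
otlp_smoke_test_endpoints() {
    local jaeger="$1" reporting_protocol="$2" is_secured="$3"
    local protocol="http://" query_port=":16686"   # assumed unsecured defaults
    if [ "$is_secured" = true ]; then
        protocol="https://" query_port=":443"      # OAuth-proxied query route on OpenShift
    fi
    local reporting_port=":4318"                   # OTLP/HTTP
    if [ "$reporting_protocol" = grpc ]; then
        reporting_port=":4317"                     # OTLP/gRPC
    fi
    export JAEGER_QUERY_ENDPOINT="${protocol}${jaeger}-query${query_port}"
    export OTEL_EXPORTER_OTLP_ENDPOINT="http://${jaeger}-collector-headless${reporting_port}"
}

Calling otlp_smoke_test_endpoints my-jaeger http true reproduces the JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 / OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 pair seen in the render above.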
+ mkdir -p collector-otlp-allinone-grpc + cd collector-otlp-allinone-grpc + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m' Rendering files for test collector-otlp-production-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc + '[' collector-otlp-allinone-grpc '!=' _build ']' + cd .. 
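Note: each install render goes through the same deploy-mode dispatch (the chain of '[' production_autoprovisioned = ... ']' tests repeated above). A sketch of that dispatch, assuming $TEMPLATES_DIR points at the templates root and showing only the two arms exercised in this log:

# Hypothetical reduction of render_install_jaeger's mode dispatch.
render_install_jaeger_sketch() {
    local jaeger_name="$1" deploy_mode="$2" test_step="$3"
    export JAEGER_NAME="$jaeger_name"
    local install_tpl assert_tpl
    case "$deploy_mode" in
        allInOne)
            install_tpl="allinone-jaeger-install.yaml.template"
            assert_tpl="allinone-jaeger-assert.yaml.template"
            ;;
        production_autoprovisioned)
            install_tpl="openshift/production-jaeger-autoprovisioned-install.yaml.template"
            assert_tpl="production-jaeger-assert.yaml.template"
            ;;
        *)
            echo "unsupported deploy mode: $deploy_mode" >&2
            return 1
            ;;
    esac
    gomplate -f "$TEMPLATES_DIR/$install_tpl" -o "./${test_step}-install.yaml"
    gomplate -f "$TEMPLATES_DIR/$assert_tpl" -o "./${test_step}-assert.yaml"
}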
+ mkdir -p collector-otlp-production-grpc + cd collector-otlp-production-grpc + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + '[' true = true ']' + skip_test istio 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=istio + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc + '[' collector-otlp-production-grpc '!=' _build ']' + cd .. 
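Note: all of these renders lean on gomplate picking up the exported variables; gomplate exposes the process environment to templates as .Env. A self-contained toy run (the template body here is invented for illustration; the repo's real templates are far more elaborate):

# Toy gomplate render driven by an exported variable.
export JAEGER_NAME=my-jaeger
cat > /tmp/jaeger.yaml.template <<'EOF'
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: {{ .Env.JAEGER_NAME }}
EOF
gomplate -f /tmp/jaeger.yaml.template -o ./00-install.yaml
# ./00-install.yaml now carries "name: my-jaeger"

The unset JAEGER_NAME / JAEGER_QUERY_ENDPOINT / OTEL_EXPORTER_OTLP_ENDPOINT after every test render exists for the same reason: all tests are rendered in one shell, so a stale export would silently leak into the next gomplate invocation. Rendering inside a subshell, ( export ...; gomplate ... ), would give the same isolation without the bookkeeping.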
+ rm -rf istio + warning 'istio: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m' WAR: istio: Test not supported in OpenShift + '[' true = true ']' + skip_test outside-cluster 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=outside-cluster + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf outside-cluster + warning 'outside-cluster: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m' WAR: outside-cluster: Test not supported in OpenShift + start_test set-custom-img + '[' 1 -ne 1 ']' + test_name=set-custom-img + echo =========================================================================== =========================================================================== + info 'Rendering files for test set-custom-img' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test set-custom-img\e[0m' Rendering files for test set-custom-img + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p set-custom-img + cd set-custom-img + jaeger_name=my-jaeger + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml + '[' true = true ']' + skip_test non-cluster-wide 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=non-cluster-wide + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img + '[' set-custom-img '!=' _build ']' + cd .. + rm -rf non-cluster-wide + warning 'non-cluster-wide: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m' WAR: non-cluster-wide: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running miscellaneous E2E tests' Running miscellaneous E2E tests + cd tests/e2e/miscellaneous/_build + set +e + KUBECONFIG=/tmp/kubeconfig-894131460 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
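Note: the cassandra-spark, istio, outside-cluster and non-cluster-wide skips above all run through the same helper pair. Reconstructed from the traces (argument-count checks elided):

# warning: yellow 'WAR:' banner, as seen throughout this log.
warning() {
    echo -e "\e[1;33mWAR: $1\e[0m"
}

# skip_test: climb back to the suite's _build dir if needed, prune the
# rendered test directory, and leave a warning in the log.
skip_test() {
    local test_name="$1" message="$2"
    if [ "$(basename "$(pwd)")" != _build ]; then
        cd ..
    fi
    rm -rf "$test_name"
    warning "$test_name: $message"
}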
harness.go:275: Successful connection to cluster at: https://api.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 7 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 08:41:26 | artifacts | Creating namespace: kuttl-test-harmless-impala logger.go:42: 08:41:26 | artifacts | artifacts events from ns kuttl-test-harmless-impala: logger.go:42: 08:41:26 | artifacts | Deleting namespace: kuttl-test-harmless-impala === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 08:41:32 | collector-otlp-production-grpc | Creating namespace: kuttl-test-sacred-ocelot logger.go:42: 08:41:32 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 08:41:32 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-sacred-ocelot/my-jaeger created logger.go:42: 08:42:08 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 08:42:08 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:42:08 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 08:42:10 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
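Note: the last-applied-configuration warning above is standard kubectl behaviour rather than a test failure: the Jaeger CR was created without the annotation that kubectl apply patches against, so the first apply patches it in automatically. For resources you manage yourself, either form avoids the warning:

kubectl create --save-config -f jaeger.yaml   # writes the annotation at creation time
kubectl apply -f jaeger.yaml                  # first apply writes it; later applies patch cleanly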
logger.go:42: 08:42:16 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 08:42:17 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:42:17 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 08:42:17 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 08:42:37 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 08:42:37 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-sacred-ocelot: logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bb46 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg replicaset-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg Binding Scheduled Successfully assigned kuttl-test-sacred-ocelot/elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal 
Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:39 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bb46 to 1 deployment-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:49 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:41:54 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsacredocelotmyjaeger-1-697787bbdgbgg.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-9j89s Binding Scheduled Successfully assigned kuttl-test-sacred-ocelot/my-jaeger-collector-5489f5bd9b-9j89s to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Warning Pod my-jaeger-collector-5489f5bd9b-9j89s FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-9j89s replicaset-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7 Binding Scheduled Successfully assigned kuttl-test-sacred-ocelot/my-jaeger-query-7886d9bb9d-nm6l7 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7 AddedInterface Add eth0 [10.129.2.72/23] from ovn-kubernetes logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{oauth-proxy} Pulled Container image 
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7886d9bb9d SuccessfulCreate Created pod: my-jaeger-query-7886d9bb9d-nm6l7 replicaset-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:06 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7886d9bb9d to 1 deployment-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:07 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-9j89s AddedInterface Add eth0 [10.131.0.91/23] from ovn-kubernetes logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:07 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-9j89s.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:07 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-9j89s.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:07 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-9j89s.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:07 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:07 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:12 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:12 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:12 +0000 UTC Normal Pod my-jaeger-query-7886d9bb9d-nm6l7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:12 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7886d9bb9d SuccessfulDelete Deleted 
pod: my-jaeger-query-7886d9bb9d-nm6l7 replicaset-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:12 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-7886d9bb9d to 0 from 1 deployment-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:13 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh Binding Scheduled Successfully assigned kuttl-test-sacred-ocelot/my-jaeger-query-6647469d9-b5vrh to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:13 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh AddedInterface Add eth0 [10.129.2.73/23] from ovn-kubernetes logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:13 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:13 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:13 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:13 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:13 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6647469d9 SuccessfulCreate Created pod: my-jaeger-query-6647469d9-b5vrh replicaset-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:13 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6647469d9 to 1 deployment-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:14 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:14 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:14 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:14 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:14 +0000 UTC Normal Pod my-jaeger-query-6647469d9-b5vrh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:17 
+0000 UTC Normal Pod check-span-mrmlh Binding Scheduled Successfully assigned kuttl-test-sacred-ocelot/check-span-mrmlh to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-mrmlh job-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:17 +0000 UTC Normal Pod report-span-7f4vf Binding Scheduled Successfully assigned kuttl-test-sacred-ocelot/report-span-7f4vf to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:17 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-7f4vf job-controller logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:18 +0000 UTC Normal Pod check-span-mrmlh AddedInterface Add eth0 [10.131.0.93/23] from ovn-kubernetes logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:18 +0000 UTC Normal Pod check-span-mrmlh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:18 +0000 UTC Normal Pod check-span-mrmlh.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:18 +0000 UTC Normal Pod check-span-mrmlh.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:18 +0000 UTC Normal Pod report-span-7f4vf AddedInterface Add eth0 [10.131.0.92/23] from ovn-kubernetes logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:18 +0000 UTC Normal Pod report-span-7f4vf.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:18 +0000 UTC Normal Pod report-span-7f4vf.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:18 +0000 UTC Normal Pod report-span-7f4vf.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed 
to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:42:37 | collector-otlp-production-grpc | 2023-11-13 08:42:36 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:42:37 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-sacred-ocelot
=== CONT kuttl/harness/set-custom-img
logger.go:42: 08:42:43 | set-custom-img | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:42:43 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:42:43 | set-custom-img | Creating namespace: kuttl-test-star-squirrel
logger.go:42: 08:42:43 | set-custom-img/1-install | starting test step 1-install
logger.go:42: 08:42:44 | set-custom-img/1-install | Jaeger:kuttl-test-star-squirrel/my-jaeger created
logger.go:42: 08:43:20 | set-custom-img/1-install | test step completed 1-install
logger.go:42: 08:43:20 | set-custom-img/2-install | starting test step 2-install
logger.go:42: 08:43:20 | set-custom-img/2-install | Jaeger:kuttl-test-star-squirrel/my-jaeger updated
logger.go:42: 08:43:20 | set-custom-img/2-install | test step completed 2-install
logger.go:42: 08:43:20 | set-custom-img/3-check-image | starting test step 3-check-image
logger.go:42: 08:43:20 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh]
logger.go:42: 08:43:20 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856
logger.go:42: 08:43:25 | set-custom-img/3-check-image | Collector image asserted properly!
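Note: the 3-check-image step polls until the collector Deployment reports the image set in step 2, which is why a mismatch line is immediately followed by a success line five seconds later. A plausible reconstruction of check-collector-img.sh; the jsonpath, retry bound and sleep interval are assumptions, and only the two messages come from the log:

# Hypothetical poll loop behind check-collector-img.sh.
expected=test
for attempt in $(seq 1 30); do
    actual=$(kubectl get deployment my-jaeger-collector -n "$NAMESPACE" \
        -o jsonpath='{.spec.template.spec.containers[0].image}')
    if [ "$actual" = "$expected" ]; then
        echo "Collector image asserted properly!"
        exit 0
    fi
    echo "Collector image mismatch. Expected: $expected. Has: $actual"
    sleep 5
done
exit 1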
logger.go:42: 08:43:25 | set-custom-img/3-check-image | test step completed 3-check-image logger.go:42: 08:43:25 | set-custom-img | set-custom-img events from ns kuttl-test-star-squirrel: logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:49 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f5688 SuccessfulCreate Created pod: elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb replicaset-controller logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb Binding Scheduled Successfully assigned kuttl-test-star-squirrel/elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:49 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f5688 to 1 deployment-controller logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb AddedInterface Add eth0 [10.128.2.46/23] from ovn-kubernetes logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:42:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:00 +0000 UTC Warning Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:05 +0000 UTC Warning Pod elasticsearch-cdm-kuttlteststarsquirrelmyjaeger-1-554c6f56gjxdb.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:16 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-qb8w8 Binding Scheduled 
Successfully assigned kuttl-test-star-squirrel/my-jaeger-collector-5489f5bd9b-qb8w8 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:16 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-qb8w8 replicaset-controller logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:16 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:16 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g Binding Scheduled Successfully assigned kuttl-test-star-squirrel/my-jaeger-query-96bd5d77f-jcr9g to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:16 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-96bd5d77f SuccessfulCreate Created pod: my-jaeger-query-96bd5d77f-jcr9g replicaset-controller logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:16 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-96bd5d77f to 1 deployment-controller logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-qb8w8 AddedInterface Add eth0 [10.131.0.94/23] from ovn-kubernetes logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-qb8w8.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-qb8w8.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-qb8w8.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g AddedInterface Add eth0 [10.129.2.74/23] from ovn-kubernetes logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g.spec.containers{oauth-proxy} Created Created 
container oauth-proxy kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:17 +0000 UTC Normal Pod my-jaeger-query-96bd5d77f-jcr9g.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:23 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-qb8w8.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:23 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulDelete Deleted pod: my-jaeger-collector-5489f5bd9b-qb8w8 replicaset-controller logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:23 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-5489f5bd9b to 0 from 1 deployment-controller logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:24 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-ng98m Binding Scheduled Successfully assigned kuttl-test-star-squirrel/my-jaeger-collector-7fd96ccd44-ng98m to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:24 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-ng98m AddedInterface Add eth0 [10.131.0.95/23] from ovn-kubernetes logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:24 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-ng98m.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:24 +0000 UTC Warning Pod my-jaeger-collector-7fd96ccd44-ng98m.spec.containers{jaeger-collector} Failed Failed to pull image "test": reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:24 +0000 UTC Warning Pod my-jaeger-collector-7fd96ccd44-ng98m.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:24 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7fd96ccd44 SuccessfulCreate Created pod: my-jaeger-collector-7fd96ccd44-ng98m replicaset-controller logger.go:42: 08:43:25 | set-custom-img | 2023-11-13 08:43:24 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-7fd96ccd44 to 1 deployment-controller logger.go:42: 08:43:25 | set-custom-img | Deleting namespace: kuttl-test-star-squirrel === CONT kuttl/harness/collector-otlp-production-http logger.go:42: 08:43:31 | collector-otlp-production-http | Creating namespace: kuttl-test-closing-lab logger.go:42: 08:43:31 | collector-otlp-production-http/1-install | starting test step 1-install logger.go:42: 08:43:31 | collector-otlp-production-http/1-install | 
Jaeger:kuttl-test-closing-lab/my-jaeger created logger.go:42: 08:44:07 | collector-otlp-production-http/1-install | test step completed 1-install logger.go:42: 08:44:07 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:44:07 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 08:44:09 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:44:15 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 08:44:16 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:44:17 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created logger.go:42: 08:44:17 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created logger.go:42: 08:44:28 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test logger.go:42: 08:44:28 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-closing-lab: logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:37 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942 replicaset-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942 Binding Scheduled Successfully assigned kuttl-test-closing-lab/elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942 to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:37 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942 FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:37 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestclosinglabmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4 to 1 deployment-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942 AddedInterface Add eth0 [10.128.2.47/23] from ovn-kubernetes logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:38 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:48 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:43:53 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestclosinglabmyjaeger-1-5bd57947c4wg942.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:04 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hsksp Binding Scheduled Successfully assigned kuttl-test-closing-lab/my-jaeger-collector-5489f5bd9b-hsksp to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:04 +0000 UTC Warning Pod my-jaeger-collector-5489f5bd9b-hsksp FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:04 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-hsksp replicaset-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:04 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:04 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v Binding Scheduled Successfully assigned kuttl-test-closing-lab/my-jaeger-query-57fdd5cd46-52c6v to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:44:28 | collector-otlp-production-http | 
2023-11-13 08:44:04 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-57fdd5cd46 SuccessfulCreate Created pod: my-jaeger-query-57fdd5cd46-52c6v replicaset-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:04 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-57fdd5cd46 to 1 deployment-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hsksp AddedInterface Add eth0 [10.131.0.96/23] from ovn-kubernetes logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hsksp.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hsksp.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hsksp.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v AddedInterface Add eth0 [10.129.2.75/23] from ovn-kubernetes logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod 
my-jaeger-query-57fdd5cd46-52c6v.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:05 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:11 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:11 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:11 +0000 UTC Normal Pod my-jaeger-query-57fdd5cd46-52c6v.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:11 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-57fdd5cd46 SuccessfulDelete Deleted pod: my-jaeger-query-57fdd5cd46-52c6v replicaset-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:11 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-57fdd5cd46 to 0 from 1 deployment-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:12 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t Binding Scheduled Successfully assigned kuttl-test-closing-lab/my-jaeger-query-748588c8dc-lpc9t to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:12 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-748588c8dc SuccessfulCreate Created pod: my-jaeger-query-748588c8dc-lpc9t replicaset-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:12 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-748588c8dc to 1 deployment-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t AddedInterface Add eth0 [10.129.2.76/23] from ovn-kubernetes logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod 
my-jaeger-query-748588c8dc-lpc9t.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:13 +0000 UTC Normal Pod my-jaeger-query-748588c8dc-lpc9t.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod check-span-xlhhm Binding Scheduled Successfully assigned kuttl-test-closing-lab/check-span-xlhhm to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod check-span-xlhhm AddedInterface Add eth0 [10.131.0.98/23] from ovn-kubernetes logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod check-span-xlhhm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod check-span-xlhhm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod check-span-xlhhm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-xlhhm job-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod report-span-2x7nd Binding Scheduled Successfully assigned kuttl-test-closing-lab/report-span-2x7nd to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod report-span-2x7nd AddedInterface Add eth0 [10.131.0.97/23] from ovn-kubernetes logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod report-span-2x7nd.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod report-span-2x7nd.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Pod report-span-2x7nd.spec.containers{report-span} Started 
Started container report-span kubelet logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:17 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2x7nd job-controller logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:44:28 | collector-otlp-production-http | 2023-11-13 08:44:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:44:28 | collector-otlp-production-http | Deleting namespace: kuttl-test-closing-lab === CONT kuttl/harness/collector-otlp-allinone-grpc logger.go:42: 08:44:40 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-smashing-crawdad logger.go:42: 08:44:40 | collector-otlp-allinone-grpc/0-install | starting test step 0-install logger.go:42: 08:44:40 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-smashing-crawdad/my-jaeger created logger.go:42: 08:44:47 | collector-otlp-allinone-grpc/0-install | test step completed 0-install logger.go:42: 08:44:47 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test logger.go:42: 08:44:47 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 08:44:49 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
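The smoke-test steps logged in this suite all follow the same render-then-apply pattern: gomplate fills tests/templates/otlp-smoke-test.yaml.template from a handful of exported endpoint variables, and kubectl creates the resulting report-span/check-span jobs in the kuttl-managed namespace. A minimal sketch of that pattern (NAMESPACE is supplied by the harness; ASSERT_IMG is omitted here for brevity, and the final kubectl wait merely stands in for kuttl's own assert step):

    #!/bin/bash
    # Sketch of the OTLP smoke-test flow seen in these logs (assumptions noted above).
    set -euo pipefail

    export REPORTING_PROTOCOL=grpc                                              # "http" for the HTTP variant
    export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 # 4318 for the HTTP variant
    export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
    export MOUNT_SECRET=e2e-test

    # Render the Job manifest (report-span + check-span) from the shared template.
    gomplate -f tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml

    # Create both jobs; waiting on check-span approximates the harness assertion.
    kubectl create -f otlp-smoke-test-job.yaml -n "$NAMESPACE"
    kubectl wait --for=condition=complete --timeout=300s job/check-span -n "$NAMESPACE"
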
logger.go:42: 08:44:55 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 08:44:56 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:44:56 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created logger.go:42: 08:44:56 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created logger.go:42: 08:45:15 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-smashing-crawdad: logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:43 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9 Binding Scheduled Successfully assigned kuttl-test-smashing-crawdad/my-jaeger-f47ccb4d5-7mhx9 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-f47ccb4d5 SuccessfulCreate Created pod: my-jaeger-f47ccb4d5-7mhx9 replicaset-controller logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:43 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-f47ccb4d5 to 1 deployment-controller logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:44 +0000 UTC Warning Pod my-jaeger-f47ccb4d5-7mhx9 FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:44 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9 AddedInterface Add eth0 [10.131.0.99/23] from ovn-kubernetes logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:45 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:45 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:45 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:45 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:45 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 
2023-11-13 08:44:45 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:51 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:51 +0000 UTC Normal Pod my-jaeger-f47ccb4d5-7mhx9.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:51 +0000 UTC Normal ReplicaSet.apps my-jaeger-f47ccb4d5 SuccessfulDelete Deleted pod: my-jaeger-f47ccb4d5-7mhx9 replicaset-controller logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:51 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-f47ccb4d5 to 0 from 1 deployment-controller logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal Pod my-jaeger-bd465c5ff-nbjwk Binding Scheduled Successfully assigned kuttl-test-smashing-crawdad/my-jaeger-bd465c5ff-nbjwk to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal Pod my-jaeger-bd465c5ff-nbjwk AddedInterface Add eth0 [10.129.2.77/23] from ovn-kubernetes logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal Pod my-jaeger-bd465c5ff-nbjwk.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal Pod my-jaeger-bd465c5ff-nbjwk.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal Pod my-jaeger-bd465c5ff-nbjwk.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal Pod my-jaeger-bd465c5ff-nbjwk.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal Pod my-jaeger-bd465c5ff-nbjwk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal Pod my-jaeger-bd465c5ff-nbjwk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal ReplicaSet.apps my-jaeger-bd465c5ff SuccessfulCreate Created pod: my-jaeger-bd465c5ff-nbjwk replicaset-controller logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:52 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-bd465c5ff to 1 deployment-controller logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Pod check-span-qrt7d Binding Scheduled Successfully assigned kuttl-test-smashing-crawdad/check-span-qrt7d to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:45:15 | 
collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Pod check-span-qrt7d AddedInterface Add eth0 [10.131.0.101/23] from ovn-kubernetes logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Pod check-span-qrt7d.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Pod check-span-qrt7d.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-qrt7d job-controller logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Pod report-span-58pd9 Binding Scheduled Successfully assigned kuttl-test-smashing-crawdad/report-span-58pd9 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Pod report-span-58pd9 AddedInterface Add eth0 [10.131.0.100/23] from ovn-kubernetes logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Pod report-span-58pd9.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Pod report-span-58pd9.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Pod report-span-58pd9.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:56 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-58pd9 job-controller logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:44:57 +0000 UTC Normal Pod check-span-qrt7d.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | 2023-11-13 08:45:15 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:45:15 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-smashing-crawdad === CONT kuttl/harness/collector-otlp-allinone-http logger.go:42: 08:45:27 | collector-otlp-allinone-http | Creating namespace: kuttl-test-mighty-mouse logger.go:42: 08:45:27 | collector-otlp-allinone-http/0-install | starting test step 0-install logger.go:42: 08:45:27 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-mighty-mouse/my-jaeger created logger.go:42: 08:45:33 | collector-otlp-allinone-http/0-install | test step completed 0-install logger.go:42: 08:45:33 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test logger.go:42: 08:45:33 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 08:45:34 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the 
kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:45:41 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 08:45:41 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:45:42 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created logger.go:42: 08:45:42 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created logger.go:42: 08:45:54 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test logger.go:42: 08:45:54 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-mighty-mouse: logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:30 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl Binding Scheduled Successfully assigned kuttl-test-mighty-mouse/my-jaeger-8bf947df8-m9sxl to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:30 +0000 UTC Normal ReplicaSet.apps my-jaeger-8bf947df8 SuccessfulCreate Created pod: my-jaeger-8bf947df8-m9sxl replicaset-controller logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:30 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-8bf947df8 to 1 deployment-controller logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:31 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl AddedInterface Add eth0 [10.129.2.78/23] from ovn-kubernetes logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:31 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:31 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:31 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:31 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:31 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 
08:45:31 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:37 +0000 UTC Normal Pod my-jaeger-594dc78cf-g4vlz Binding Scheduled Successfully assigned kuttl-test-mighty-mouse/my-jaeger-594dc78cf-g4vlz to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:37 +0000 UTC Normal ReplicaSet.apps my-jaeger-594dc78cf SuccessfulCreate Created pod: my-jaeger-594dc78cf-g4vlz replicaset-controller logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:37 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:37 +0000 UTC Normal Pod my-jaeger-8bf947df8-m9sxl.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:37 +0000 UTC Normal ReplicaSet.apps my-jaeger-8bf947df8 SuccessfulDelete Deleted pod: my-jaeger-8bf947df8-m9sxl replicaset-controller logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:37 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-8bf947df8 to 0 from 1 deployment-controller logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:37 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-594dc78cf to 1 deployment-controller logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:38 +0000 UTC Normal Pod my-jaeger-594dc78cf-g4vlz AddedInterface Add eth0 [10.131.0.102/23] from ovn-kubernetes logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:38 +0000 UTC Normal Pod my-jaeger-594dc78cf-g4vlz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:38 +0000 UTC Normal Pod my-jaeger-594dc78cf-g4vlz.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:38 +0000 UTC Normal Pod my-jaeger-594dc78cf-g4vlz.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:38 +0000 UTC Normal Pod my-jaeger-594dc78cf-g4vlz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:38 +0000 UTC Normal Pod my-jaeger-594dc78cf-g4vlz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:38 +0000 UTC Normal Pod my-jaeger-594dc78cf-g4vlz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod check-span-hptnp Binding Scheduled Successfully assigned kuttl-test-mighty-mouse/check-span-hptnp to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:45:54 | 
collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod check-span-hptnp AddedInterface Add eth0 [10.128.2.49/23] from ovn-kubernetes logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod check-span-hptnp.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod check-span-hptnp.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod check-span-hptnp.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-hptnp job-controller logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod report-span-gldb2 Binding Scheduled Successfully assigned kuttl-test-mighty-mouse/report-span-gldb2 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod report-span-gldb2 AddedInterface Add eth0 [10.129.2.79/23] from ovn-kubernetes logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod report-span-gldb2.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod report-span-gldb2.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Pod report-span-gldb2.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:42 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-gldb2 job-controller logger.go:42: 08:45:54 | collector-otlp-allinone-http | 2023-11-13 08:45:53 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:45:54 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-mighty-mouse === CONT kuttl/harness/collector-autoscale logger.go:42: 08:46:06 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:46:06 | collector-autoscale | Ignoring wait-for-hpa.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:46:06 | collector-autoscale | Creating namespace: kuttl-test-modern-python logger.go:42: 08:46:06 | collector-autoscale/1-install | starting test step 1-install logger.go:42: 08:46:06 | collector-autoscale/1-install | Jaeger:kuttl-test-modern-python/simple-prod created logger.go:42: 08:46:43 | collector-autoscale/1-install | test step completed 1-install logger.go:42: 08:46:43 | collector-autoscale/2-wait-for-hpa | starting test step 2-wait-for-hpa logger.go:42: 08:46:43 | collector-autoscale/2-wait-for-hpa | running command: [sh -c ./wait-for-hpa.sh] 
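The contents of wait-for-hpa.sh are not shown in this log; a plausible sketch of such a helper, assuming it polls kubectl get hpa until the TARGETS column stops reporting <unknown> (the echoed message matches the script output logged below):

    #!/bin/bash
    # Hypothetical wait-for-hpa helper: poll until metrics-server reports
    # values for every metric of the HPA in the test namespace.
    while kubectl get hpa -n "$NAMESPACE" | grep -q '<unknown>'; do
        echo "Some HPA metrics are not known yet"
        sleep 5
    done
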
logger.go:42: 08:46:43 | collector-autoscale/2-wait-for-hpa | Some HPA metrics are not known yet logger.go:42: 08:46:44 | collector-autoscale/2-wait-for-hpa | test step completed 2-wait-for-hpa logger.go:42: 08:46:44 | collector-autoscale/3- | starting test step 3- logger.go:42: 08:46:44 | collector-autoscale/3- | test step completed 3- logger.go:42: 08:46:44 | collector-autoscale | collector-autoscale events from ns kuttl-test-modern-python: logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6 Binding Scheduled Successfully assigned kuttl-test-modern-python/elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6 to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6 AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc9858 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6 replicaset-controller logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:13 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc9858 to 1 deployment-controller logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:23 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:46:44 | collector-autoscale | 
2023-11-13 08:46:28 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmodernpythonsimpleprod-1-99fdc94zxm6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-collector-67686746b8-mffpf Binding Scheduled Successfully assigned kuttl-test-modern-python/simple-prod-collector-67686746b8-mffpf to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-collector-67686746b8-mffpf AddedInterface Add eth0 [10.129.2.80/23] from ovn-kubernetes logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-collector-67686746b8-mffpf.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-collector-67686746b8-mffpf.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-collector-67686746b8-mffpf.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-67686746b8 SuccessfulCreate Created pod: simple-prod-collector-67686746b8-mffpf replicaset-controller logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-67686746b8 to 1 deployment-controller logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd Binding Scheduled Successfully assigned kuttl-test-modern-python/simple-prod-query-6f9bb8df9b-6fbgd to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd AddedInterface Add eth0 [10.131.0.103/23] from ovn-kubernetes logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod 
simple-prod-query-6f9bb8df9b-6fbgd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Pod simple-prod-query-6f9bb8df9b-6fbgd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6f9bb8df9b SuccessfulCreate Created pod: simple-prod-query-6f9bb8df9b-6fbgd replicaset-controller logger.go:42: 08:46:44 | collector-autoscale | 2023-11-13 08:46:40 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6f9bb8df9b to 1 deployment-controller logger.go:42: 08:46:44 | collector-autoscale | Deleting namespace: kuttl-test-modern-python
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (323.98s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.83s)
        --- PASS: kuttl/harness/collector-otlp-production-grpc (71.17s)
        --- PASS: kuttl/harness/set-custom-img (47.53s)
        --- PASS: kuttl/harness/collector-otlp-production-http (68.90s)
        --- PASS: kuttl/harness/collector-otlp-allinone-grpc (47.00s)
        --- PASS: kuttl/harness/collector-otlp-allinone-http (38.82s)
        --- PASS: kuttl/harness/collector-autoscale (44.56s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml
time="2023-11-13T08:46:51Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-13T08:46:51Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-13T08:46:51Z" level=debug msg="normalizing test case names"
time="2023-11-13T08:46:51Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts"
time="2023-11-13T08:46:51Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc"
time="2023-11-13T08:46:51Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img"
time="2023-11-13T08:46:51Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http"
time="2023-11-13T08:46:51Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc"
time="2023-11-13T08:46:51Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http"
time="2023-11-13T08:46:51Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale"
+----------------------------------------------+--------+
|                     NAME                     | RESULT |
+----------------------------------------------+--------+
| miscellaneous_artifacts                      | passed |
| miscellaneous_collector_otlp_production_grpc | passed |
| miscellaneous_set_custom_img                 | passed |
| miscellaneous_collector_otlp_production_http | passed |
| miscellaneous_collector_otlp_allinone_grpc   | passed |
| miscellaneous_collector_otlp_allinone_http   | passed |
| miscellaneous_collector_autoscale            | passed |
+----------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true + '[' 3 -ne 3 ']' + test_suite_name=sidecar + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/sidecar.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-sidecar make[2]: Entering directory '/tmp/jaeger-tests' ./tests/e2e/sidecar/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 58m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 58m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/sidecar/render.sh ++ export SUITE_DIR=./tests/e2e/sidecar ++ SUITE_DIR=./tests/e2e/sidecar ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/sidecar ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + jaeger_service_name=order + start_test sidecar-deployment + '[' 1 -ne 1 ']' + test_name=sidecar-deployment + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-deployment' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m' Rendering files for test sidecar-deployment + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build + '[' _build '!=' _build ']' + mkdir -p sidecar-deployment + cd sidecar-deployment + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-namespace + '[' 1 -ne 1 ']' + test_name=sidecar-namespace + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-namespace' + '[' 1 -ne 1 ']' + 
echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m' Rendering files for test sidecar-namespace + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment + '[' sidecar-deployment '!=' _build ']' + cd .. + mkdir -p sidecar-namespace + cd sidecar-namespace + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-skip-webhook + '[' 1 -ne 1 ']' + test_name=sidecar-skip-webhook + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-skip-webhook' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m' Rendering files for test sidecar-skip-webhook + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace + '[' sidecar-namespace '!=' _build ']' + cd .. 
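The render steps traced above all follow one pattern: render_find_service exports its parameters (JAEGER_NAME, JOB_NUMBER, SERVICE_NAME, JAEGER_QUERY_ENDPOINT) as environment variables, runs gomplate over a template to emit a numbered kuttl step file, and unsets the variables so they cannot leak into the next render. A minimal sketch of that mechanism follows; example.yaml.template is invented for illustration, since the real find-service.yaml.template under /tmp/jaeger-tests/tests/templates/ is not reproduced in this log.

# Sketch only: hypothetical template standing in for find-service.yaml.template.
export JAEGER_NAME=agent-as-sidecar
export JOB_NUMBER=00
export SERVICE_NAME=order
export JAEGER_QUERY_ENDPOINT=http://${JAEGER_NAME}-query:16686

cat > example.yaml.template <<'EOF'
apiVersion: batch/v1
kind: Job
metadata:
  name: {{ env.Getenv "JOB_NUMBER" }}-find-service
spec:
  template:
    spec:
      restartPolicy: Never
      containers:
        - name: query
          image: busybox
          command: ["wget", "-qO-", '{{ env.Getenv "JAEGER_QUERY_ENDPOINT" }}/api/services']
EOF

# Same invocation shape as the logged render_find_service calls:
gomplate -f example.yaml.template -o ./03-find-service.yaml
unset JAEGER_NAME JOB_NUMBER SERVICE_NAME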
+ mkdir -p sidecar-skip-webhook
+ cd sidecar-skip-webhook
+ render_install_vertx 01
+ '[' 1 -ne 1 ']'
+ test_step=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running sidecar E2E tests'
Running sidecar E2E tests
+ cd tests/e2e/sidecar/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-894131460
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 4 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/sidecar-deployment
=== PAUSE kuttl/harness/sidecar-deployment
=== RUN kuttl/harness/sidecar-namespace
=== PAUSE kuttl/harness/sidecar-namespace
=== RUN kuttl/harness/sidecar-skip-webhook
=== PAUSE kuttl/harness/sidecar-skip-webhook
=== CONT kuttl/harness/artifacts
logger.go:42: 08:47:00 | artifacts | Creating namespace: kuttl-test-pro-spaniel
logger.go:42: 08:47:00 | artifacts | artifacts events from ns kuttl-test-pro-spaniel:
logger.go:42: 08:47:00 | artifacts | Deleting namespace: kuttl-test-pro-spaniel
=== CONT kuttl/harness/sidecar-namespace
logger.go:42: 08:47:05 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:47:05 | sidecar-namespace | Creating namespace: kuttl-test-sought-bee
logger.go:42: 08:47:05 | sidecar-namespace/0-install | starting test step 0-install
logger.go:42: 08:47:06 | sidecar-namespace/0-install | Jaeger:kuttl-test-sought-bee/agent-as-sidecar created
logger.go:42: 08:47:12 | sidecar-namespace/0-install | test step completed 0-install
logger.go:42: 08:47:12 | sidecar-namespace/1-install | starting test step 1-install
logger.go:42: 08:47:12 | sidecar-namespace/1-install | Deployment:kuttl-test-sought-bee/vertx-create-span-sidecar created
logger.go:42: 08:47:13 | sidecar-namespace/1-install | test step completed 1-install
logger.go:42: 08:47:13 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection
logger.go:42: 08:47:13 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"]
logger.go:42: 08:47:13 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-sought-bee annotate
logger.go:42: 08:47:18 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection
logger.go:42: 08:47:18 | sidecar-namespace/3-find-service | starting test step 3-find-service
logger.go:42: 08:47:18 | sidecar-namespace/3-find-service | Job:kuttl-test-sought-bee/00-find-service created
logger.go:42: 08:47:29 | sidecar-namespace/3-find-service | test step completed 3-find-service
logger.go:42: 08:47:29 | sidecar-namespace/4-other-instance | starting test step 4-other-instance
logger.go:42: 08:47:30 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-sought-bee/agent-as-sidecar2 created
logger.go:42: 08:47:39 | sidecar-namespace/4-other-instance | test step completed 4-other-instance
logger.go:42:
08:47:39 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 08:47:39 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 08:47:39 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 08:47:39 | sidecar-namespace/6-find-service | Job:kuttl-test-sought-bee/01-find-service created logger.go:42: 08:47:58 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 08:47:58 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 08:47:58 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 08:47:58 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-sought-bee annotate logger.go:42: 08:48:01 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 08:48:01 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-sought-bee: logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:09 +0000 UTC Normal Pod agent-as-sidecar-5c6fbf96d6-2vjkq Binding Scheduled Successfully assigned kuttl-test-sought-bee/agent-as-sidecar-5c6fbf96d6-2vjkq to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:09 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-5c6fbf96d6 SuccessfulCreate Created pod: agent-as-sidecar-5c6fbf96d6-2vjkq replicaset-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:09 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-5c6fbf96d6 to 1 deployment-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:10 +0000 UTC Normal Pod agent-as-sidecar-5c6fbf96d6-2vjkq AddedInterface Add eth0 [10.131.0.104/23] from ovn-kubernetes logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:10 +0000 UTC Normal Pod agent-as-sidecar-5c6fbf96d6-2vjkq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:10 +0000 UTC Normal Pod agent-as-sidecar-5c6fbf96d6-2vjkq.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:10 +0000 UTC Normal Pod agent-as-sidecar-5c6fbf96d6-2vjkq.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:12 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-2xvxr Binding Scheduled Successfully assigned kuttl-test-sought-bee/vertx-create-span-sidecar-84d458b68c-2xvxr to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:12 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-2xvxr AddedInterface Add eth0 [10.129.2.81/23] from ovn-kubernetes logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:12 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-2xvxr.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:12 +0000 UTC Normal Pod 
vertx-create-span-sidecar-84d458b68c-2xvxr.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:12 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-2xvxr.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:12 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-2xvxr replicaset-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:12 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:13 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg Binding Scheduled Successfully assigned kuttl-test-sought-bee/vertx-create-span-sidecar-85df59f544-vswxg to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:13 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:13 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-85df59f544 SuccessfulCreate Created pod: vertx-create-span-sidecar-85df59f544-vswxg replicaset-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-85df59f544 to 1 deployment-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:17 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.65s (3.65s including waiting) kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:17 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:17 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:17 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:17 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:17 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet 
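The events above show what sidecar-namespace validates: once step 2-enable-injection annotates the namespace with sidecar.jaegertracing.io/inject=true, the operator rewrites the vertx-create-span-sidecar Deployment, and the resulting rollout (ReplicaSet 85df59f544) brings up pods carrying an injected jaeger-agent container next to the application container. A hedged way to reproduce that check by hand, assuming the same Deployment name and a NAMESPACE variable like the one the test step uses:

# Same annotate command the 2-enable-injection step runs:
kubectl annotate --overwrite namespaces "$NAMESPACE" \
  "sidecar.jaegertracing.io/inject"="true"

# Wait for the operator-triggered rollout, then list the containers in the
# Deployment's pod template; jaeger-agent should now appear alongside
# vertx-create-span-sidecar.
kubectl rollout status deployment/vertx-create-span-sidecar -n "$NAMESPACE"
kubectl get deployment vertx-create-span-sidecar -n "$NAMESPACE" \
  -o jsonpath='{.spec.template.spec.containers[*].name}'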
logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:18 +0000 UTC Normal Pod 00-find-service-x7525 Binding Scheduled Successfully assigned kuttl-test-sought-bee/00-find-service-x7525 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:18 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-x7525 job-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:19 +0000 UTC Normal Pod 00-find-service-x7525 AddedInterface Add eth0 [10.131.0.105/23] from ovn-kubernetes logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:19 +0000 UTC Normal Pod 00-find-service-x7525.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:19 +0000 UTC Normal Pod 00-find-service-x7525.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:19 +0000 UTC Normal Pod 00-find-service-x7525.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:20 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-2xvxr.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.81:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:20 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-2xvxr.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.81:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:22 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-2xvxr.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:23 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-2xvxr.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.81:8080/": read tcp 10.129.2.2:47306->10.129.2.81:8080: read: connection reset by peer kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:23 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-2xvxr.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.81:8080/": dial tcp 10.129.2.81:8080: connect: connection refused kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:25 +0000 UTC Warning Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.51:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:25 +0000 UTC Warning Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.51:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 
08:47:28 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:28 +0000 UTC Warning Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.51:8080/": read tcp 10.128.2.2:42244->10.128.2.51:8080: read: connection reset by peer kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:28 +0000 UTC Warning Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.51:8080/": dial tcp 10.128.2.51:8080: connect: connection refused kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:28 +0000 UTC Normal Pod vertx-create-span-sidecar-85df59f544-vswxg.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:29 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:34 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-2xvxr.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.81:8080/": read tcp 10.129.2.2:50906->10.129.2.81:8080: read: connection reset by peer kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:35 +0000 UTC Normal Pod agent-as-sidecar2-7df48b9cc4-vdz24 Binding Scheduled Successfully assigned kuttl-test-sought-bee/agent-as-sidecar2-7df48b9cc4-vdz24 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:35 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-7df48b9cc4 SuccessfulCreate Created pod: agent-as-sidecar2-7df48b9cc4-vdz24 replicaset-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:35 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-7df48b9cc4 to 1 deployment-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:36 +0000 UTC Warning Pod agent-as-sidecar2-7df48b9cc4-vdz24 FailedMount MountVolume.SetUp failed for volume "agent-as-sidecar2-collector-tls-config-volume" : secret "agent-as-sidecar2-collector-headless-tls" not found kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:36 +0000 UTC Normal Pod agent-as-sidecar2-7df48b9cc4-vdz24 AddedInterface Add eth0 [10.131.0.106/23] from ovn-kubernetes logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:36 +0000 UTC Normal Pod agent-as-sidecar2-7df48b9cc4-vdz24.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:37 +0000 UTC Normal Pod agent-as-sidecar2-7df48b9cc4-vdz24.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:37 +0000 UTC Normal Pod agent-as-sidecar2-7df48b9cc4-vdz24.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:39 +0000 UTC Normal Pod 
01-find-service-h4fns Binding Scheduled Successfully assigned kuttl-test-sought-bee/01-find-service-h4fns to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:39 +0000 UTC Normal Pod 01-find-service-h4fns AddedInterface Add eth0 [10.129.2.82/23] from ovn-kubernetes logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:39 +0000 UTC Normal Pod 01-find-service-h4fns.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:39 +0000 UTC Normal Pod 01-find-service-h4fns.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:39 +0000 UTC Normal Pod 01-find-service-h4fns.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:39 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-h4fns job-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:39 +0000 UTC Normal Pod agent-as-sidecar-5c6fbf96d6-2vjkq.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt Binding Scheduled Successfully assigned kuttl-test-sought-bee/vertx-create-span-sidecar-5f6d847dc4-p2kjt to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt AddedInterface Add eth0 [10.131.0.107/23] from ovn-kubernetes logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:43 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5f6d847dc4 SuccessfulCreate Created pod: vertx-create-span-sidecar-5f6d847dc4-p2kjt replicaset-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:43 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-2xvxr replicaset-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:43 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:43 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-5f6d847dc4 to 1 from 0 deployment-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:44 +0000 UTC Normal Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{vertx-create-span-sidecar} Started Started container 
vertx-create-span-sidecar kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:44 +0000 UTC Normal Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:44 +0000 UTC Normal Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:44 +0000 UTC Normal Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:51 +0000 UTC Warning Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.107:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:51 +0000 UTC Warning Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.107:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:54 +0000 UTC Normal Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:54 +0000 UTC Warning Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.107:8080/": read tcp 10.131.0.2:54892->10.131.0.107:8080: read: connection reset by peer kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:54 +0000 UTC Warning Pod vertx-create-span-sidecar-5f6d847dc4-p2kjt.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.107:8080/": dial tcp 10.131.0.107:8080: connect: connection refused kubelet logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:58 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:58 +0000 UTC Normal Pod vertx-create-span-sidecar-7b89d5dcdc-fp84l Binding Scheduled Successfully assigned kuttl-test-sought-bee/vertx-create-span-sidecar-7b89d5dcdc-fp84l to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:58 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7b89d5dcdc SuccessfulCreate Created pod: vertx-create-span-sidecar-7b89d5dcdc-fp84l replicaset-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:58 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-85df59f544 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-85df59f544-vswxg replicaset-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:58 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-85df59f544 to 0 from 1 deployment-controller logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:58 +0000 UTC Normal 
Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7b89d5dcdc to 1 from 0 deployment-controller
logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:59 +0000 UTC Normal Pod vertx-create-span-sidecar-7b89d5dcdc-fp84l AddedInterface Add eth0 [10.129.2.83/23] from ovn-kubernetes
logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:59 +0000 UTC Normal Pod vertx-create-span-sidecar-7b89d5dcdc-fp84l.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:59 +0000 UTC Normal Pod vertx-create-span-sidecar-7b89d5dcdc-fp84l.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 08:48:01 | sidecar-namespace | 2023-11-13 08:47:59 +0000 UTC Normal Pod vertx-create-span-sidecar-7b89d5dcdc-fp84l.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 08:48:01 | sidecar-namespace | Deleting namespace: kuttl-test-sought-bee
=== CONT kuttl/harness/sidecar-skip-webhook
logger.go:42: 08:48:07 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:48:07 | sidecar-skip-webhook | Creating namespace: kuttl-test-together-alien
logger.go:42: 08:48:07 | sidecar-skip-webhook/0-install | starting test step 0-install
logger.go:42: 08:48:07 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-together-alien/agent-as-sidecar created
logger.go:42: 08:48:13 | sidecar-skip-webhook/0-install | test step completed 0-install
logger.go:42: 08:48:13 | sidecar-skip-webhook/1-install | starting test step 1-install
logger.go:42: 08:48:13 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-together-alien/vertx-create-span-sidecar created
logger.go:42: 08:48:14 | sidecar-skip-webhook/1-install | test step completed 1-install
logger.go:42: 08:48:14 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label
logger.go:42: 08:48:14 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-together-alien]
logger.go:42: 08:48:14 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled
logger.go:42: 08:48:14 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-together-alien]
logger.go:42: 08:48:14 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotate
logger.go:42: 08:48:15 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label
logger.go:42: 08:48:15 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label
logger.go:42: 08:48:15 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-together-alien]
logger.go:42: 08:48:15 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled
logger.go:42: 08:48:16 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label
logger.go:42: 08:48:16 | sidecar-skip-webhook | 
sidecar-skip-webhook events from ns kuttl-test-together-alien: logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:10 +0000 UTC Normal Pod agent-as-sidecar-7b897b499f-k2fx6 Binding Scheduled Successfully assigned kuttl-test-together-alien/agent-as-sidecar-7b897b499f-k2fx6 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:10 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-7b897b499f SuccessfulCreate Created pod: agent-as-sidecar-7b897b499f-k2fx6 replicaset-controller logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:10 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-7b897b499f to 1 deployment-controller logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:11 +0000 UTC Normal Pod agent-as-sidecar-7b897b499f-k2fx6 AddedInterface Add eth0 [10.131.0.108/23] from ovn-kubernetes logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:11 +0000 UTC Normal Pod agent-as-sidecar-7b897b499f-k2fx6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:11 +0000 UTC Normal Pod agent-as-sidecar-7b897b499f-k2fx6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:11 +0000 UTC Normal Pod agent-as-sidecar-7b897b499f-k2fx6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:13 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-tcqhg Binding Scheduled Successfully assigned kuttl-test-together-alien/vertx-create-span-sidecar-84d458b68c-tcqhg to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-tcqhg replicaset-controller logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7858548469-tsktc Binding Scheduled Successfully assigned kuttl-test-together-alien/vertx-create-span-sidecar-7858548469-tsktc to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:14 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7858548469 SuccessfulCreate Created pod: vertx-create-span-sidecar-7858548469-tsktc replicaset-controller logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:14 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-tcqhg AddedInterface Add eth0 [10.129.2.84/23] from ovn-kubernetes logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:14 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-tcqhg.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:14 +0000 UTC Normal Pod 
vertx-create-span-sidecar-84d458b68c-tcqhg.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:14 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-tcqhg.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:14 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7858548469 to 1 deployment-controller
logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:15 +0000 UTC Normal Pod vertx-create-span-sidecar-7858548469-tsktc AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes
logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:15 +0000 UTC Normal Pod vertx-create-span-sidecar-7858548469-tsktc.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:15 +0000 UTC Normal Pod vertx-create-span-sidecar-7858548469-tsktc.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:15 +0000 UTC Normal Pod vertx-create-span-sidecar-7858548469-tsktc.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:15 +0000 UTC Normal Pod vertx-create-span-sidecar-7858548469-tsktc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:15 +0000 UTC Normal Pod vertx-create-span-sidecar-7858548469-tsktc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:48:16 | sidecar-skip-webhook | 2023-11-13 08:48:15 +0000 UTC Normal Pod vertx-create-span-sidecar-7858548469-tsktc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:48:16 | sidecar-skip-webhook | Deleting namespace: kuttl-test-together-alien
=== CONT kuttl/harness/sidecar-deployment
logger.go:42: 08:48:22 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:48:22 | sidecar-deployment | Creating namespace: kuttl-test-present-werewolf
logger.go:42: 08:48:22 | sidecar-deployment/0-install | starting test step 0-install
logger.go:42: 08:48:22 | sidecar-deployment/0-install | Jaeger:kuttl-test-present-werewolf/agent-as-sidecar created
logger.go:42: 08:48:28 | sidecar-deployment/0-install | test step completed 0-install
logger.go:42: 08:48:28 | sidecar-deployment/1-install | starting test step 1-install
logger.go:42: 08:48:28 | sidecar-deployment/1-install | Deployment:kuttl-test-present-werewolf/vertx-create-span-sidecar created
logger.go:42: 08:48:30 | sidecar-deployment/1-install | test step completed 1-install
logger.go:42: 08:48:30 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection
logger.go:42: 08:48:30 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-present-werewolf]
logger.go:42: 08:48:31 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotate
logger.go:42: 08:48:33 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection
logger.go:42: 08:48:33 | sidecar-deployment/3-find-service | starting test step 3-find-service
logger.go:42: 08:48:33 | sidecar-deployment/3-find-service | Job:kuttl-test-present-werewolf/00-find-service created
logger.go:42: 08:48:45 | sidecar-deployment/3-find-service | test step completed 3-find-service
logger.go:42: 08:48:45 | sidecar-deployment/4-other-instance | starting test step 4-other-instance
logger.go:42: 08:48:45 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-present-werewolf/agent-as-sidecar2 created
logger.go:42: 08:48:50 | sidecar-deployment/4-other-instance | test step completed 4-other-instance
logger.go:42: 08:48:50 | sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance
logger.go:42: 08:48:50 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance
logger.go:42: 08:48:50 | sidecar-deployment/6-find-service | starting test step 6-find-service
logger.go:42: 08:48:50 | sidecar-deployment/6-find-service | Job:kuttl-test-present-werewolf/01-find-service created
logger.go:42: 08:49:10 | sidecar-deployment/6-find-service | test step completed 6-find-service
logger.go:42: 08:49:10 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection
logger.go:42: 08:49:10 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-present-werewolf]
logger.go:42: 08:49:10 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotate
logger.go:42: 08:49:12 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection
logger.go:42: 08:49:12 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-present-werewolf:
logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:26 +0000 UTC Normal Pod agent-as-sidecar-5c85d47db6-x8fcp Binding Scheduled Successfully assigned kuttl-test-present-werewolf/agent-as-sidecar-5c85d47db6-x8fcp to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:26 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-5c85d47db6 SuccessfulCreate Created pod: agent-as-sidecar-5c85d47db6-x8fcp replicaset-controller
logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:26 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-5c85d47db6 to 1 deployment-controller
logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:27 +0000 UTC Normal Pod agent-as-sidecar-5c85d47db6-x8fcp AddedInterface Add eth0 [10.131.0.109/23] from ovn-kubernetes
logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:27 +0000 UTC Normal Pod agent-as-sidecar-5c85d47db6-x8fcp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:27 +0000 UTC Normal Pod agent-as-sidecar-5c85d47db6-x8fcp.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42:
08:49:12 | sidecar-deployment | 2023-11-13 08:48:27 +0000 UTC Normal Pod agent-as-sidecar-5c85d47db6-x8fcp.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:28 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t9qtw Binding Scheduled Successfully assigned kuttl-test-present-werewolf/vertx-create-span-sidecar-84d458b68c-t9qtw to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:28 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-t9qtw replicaset-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:28 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:29 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t9qtw AddedInterface Add eth0 [10.129.2.85/23] from ovn-kubernetes logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:29 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t9qtw.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:29 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t9qtw.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:29 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t9qtw.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5 Binding Scheduled Successfully assigned kuttl-test-present-werewolf/vertx-create-span-sidecar-55dd9c69b4-2wjp5 to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5 AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal Pod 
vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-55dd9c69b4 SuccessfulCreate Created pod: vertx-create-span-sidecar-55dd9c69b4-2wjp5 replicaset-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:31 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-55dd9c69b4 to 1 deployment-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:33 +0000 UTC Normal Pod 00-find-service-t927q Binding Scheduled Successfully assigned kuttl-test-present-werewolf/00-find-service-t927q to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:33 +0000 UTC Normal Pod 00-find-service-t927q AddedInterface Add eth0 [10.131.0.110/23] from ovn-kubernetes logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:33 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-t927q job-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:34 +0000 UTC Normal Pod 00-find-service-t927q.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:34 +0000 UTC Normal Pod 00-find-service-t927q.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:34 +0000 UTC Normal Pod 00-find-service-t927q.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:37 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-t9qtw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.85:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:37 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-t9qtw.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.85:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:39 +0000 UTC Warning Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.53:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:39 +0000 UTC Warning Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.53:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:39 +0000 UTC Normal Pod 
vertx-create-span-sidecar-84d458b68c-t9qtw.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:39 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-t9qtw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.85:8080/": read tcp 10.129.2.2:48380->10.129.2.85:8080: read: connection reset by peer kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:39 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-t9qtw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.85:8080/": dial tcp 10.129.2.85:8080: connect: connection refused kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:41 +0000 UTC Normal Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:42 +0000 UTC Warning Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.53:8080/": read tcp 10.128.2.2:34706->10.128.2.53:8080: read: connection reset by peer kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:42 +0000 UTC Warning Pod vertx-create-span-sidecar-55dd9c69b4-2wjp5.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.53:8080/": dial tcp 10.128.2.53:8080: connect: connection refused kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:44 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:49 +0000 UTC Normal Pod agent-as-sidecar2-6fd5f57946-54xxs Binding Scheduled Successfully assigned kuttl-test-present-werewolf/agent-as-sidecar2-6fd5f57946-54xxs to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:49 +0000 UTC Normal Pod agent-as-sidecar2-6fd5f57946-54xxs AddedInterface Add eth0 [10.129.2.86/23] from ovn-kubernetes logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:49 +0000 UTC Normal Pod agent-as-sidecar2-6fd5f57946-54xxs.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:49 +0000 UTC Normal Pod agent-as-sidecar2-6fd5f57946-54xxs.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:49 +0000 UTC Normal Pod agent-as-sidecar2-6fd5f57946-54xxs.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:49 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-6fd5f57946 SuccessfulCreate Created pod: agent-as-sidecar2-6fd5f57946-54xxs replicaset-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:49 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-6fd5f57946 to 1 deployment-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:49 +0000 UTC Warning Pod 
vertx-create-span-sidecar-84d458b68c-t9qtw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.85:8080/": read tcp 10.129.2.2:34888->10.129.2.85:8080: read: connection reset by peer kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:50 +0000 UTC Normal Pod 01-find-service-kcp8v Binding Scheduled Successfully assigned kuttl-test-present-werewolf/01-find-service-kcp8v to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:50 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-kcp8v job-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:50 +0000 UTC Normal Pod agent-as-sidecar-5c85d47db6-x8fcp.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:51 +0000 UTC Normal Pod 01-find-service-kcp8v AddedInterface Add eth0 [10.131.0.111/23] from ovn-kubernetes logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:51 +0000 UTC Normal Pod 01-find-service-kcp8v.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-tdvkiyh8/pipeline@sha256:eecc35d9b692970d2ca1802b2e8917209a5d555eb820d2d403545a5961b2d0a2" already present on machine kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:51 +0000 UTC Normal Pod 01-find-service-kcp8v.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:51 +0000 UTC Normal Pod 01-find-service-kcp8v.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal Pod vertx-create-span-sidecar-58f94b86bb-4h4q5 Binding Scheduled Successfully assigned kuttl-test-present-werewolf/vertx-create-span-sidecar-58f94b86bb-4h4q5 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal Pod vertx-create-span-sidecar-58f94b86bb-4h4q5 AddedInterface Add eth0 [10.131.0.112/23] from ovn-kubernetes logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet 
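The sidecar-deployment events above come from the Deployment-scoped variant of the same flow: its 2-enable-injection step annotates the Deployment itself rather than the namespace. sidecar-skip-webhook, logged just before it, covers the guard rail: a Deployment labeled app.kubernetes.io/name=jaeger-operator is treated as the operator's own workload and is skipped by the injection webhook even when the annotation is present. The three commands involved, as they appear in the test steps (NAMESPACE standing in for the kuttl-generated namespace):

# Deployment-scoped injection (sidecar-deployment, step 2-enable-injection):
kubectl annotate --overwrite deployment vertx-create-span-sidecar \
  sidecar.jaegertracing.io/inject=true --namespace "$NAMESPACE"

# Guard from sidecar-skip-webhook: with this label set, injection is skipped
# even though the annotation above is present.
kubectl label deployment vertx-create-span-sidecar \
  app.kubernetes.io/name=jaeger-operator --namespace "$NAMESPACE"

# Removing the label again (trailing '-') makes the Deployment eligible:
kubectl label deployment vertx-create-span-sidecar \
  app.kubernetes.io/name- --namespace "$NAMESPACE"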
logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-58f94b86bb SuccessfulCreate Created pod: vertx-create-span-sidecar-58f94b86bb-4h4q5 replicaset-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-t9qtw replicaset-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:53 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-58f94b86bb to 1 from 0 deployment-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:48:54 +0000 UTC Normal Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:01 +0000 UTC Warning Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.112:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:01 +0000 UTC Warning Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.112:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:03 +0000 UTC Normal Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:04 +0000 UTC Warning Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.112:8080/": read tcp 10.131.0.2:54182->10.131.0.112:8080: read: connection reset by peer kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:04 +0000 UTC Warning Pod vertx-create-span-sidecar-58f94b86bb-4h4q5.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.112:8080/": dial tcp 10.131.0.112:8080: connect: connection refused kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:09 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:10 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-55dd9c69b4 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-55dd9c69b4-2wjp5 replicaset-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:10 +0000 UTC Normal Pod vertx-create-span-sidecar-86bc5f4cd7-wpr7x Binding Scheduled Successfully assigned kuttl-test-present-werewolf/vertx-create-span-sidecar-86bc5f4cd7-wpr7x to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:10 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-86bc5f4cd7 SuccessfulCreate Created pod: 
vertx-create-span-sidecar-86bc5f4cd7-wpr7x replicaset-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:10 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-55dd9c69b4 to 0 from 1 deployment-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:10 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-86bc5f4cd7 to 1 from 0 deployment-controller logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:11 +0000 UTC Normal Pod vertx-create-span-sidecar-86bc5f4cd7-wpr7x AddedInterface Add eth0 [10.129.2.87/23] from ovn-kubernetes logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:11 +0000 UTC Normal Pod vertx-create-span-sidecar-86bc5f4cd7-wpr7x.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:11 +0000 UTC Normal Pod vertx-create-span-sidecar-86bc5f4cd7-wpr7x.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:49:12 | sidecar-deployment | 2023-11-13 08:49:11 +0000 UTC Normal Pod vertx-create-span-sidecar-86bc5f4cd7-wpr7x.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:49:12 | sidecar-deployment | Deleting namespace: kuttl-test-present-werewolf === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: ""
--- PASS: kuttl (138.92s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.79s)
--- PASS: kuttl/harness/sidecar-namespace (61.40s)
--- PASS: kuttl/harness/sidecar-skip-webhook (15.16s)
--- PASS: kuttl/harness/sidecar-deployment (56.42s)
PASS
+ exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml time="2023-11-13T08:49:19Z" level=debug msg="Setting a new name for the test suites" time="2023-11-13T08:49:19Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-11-13T08:49:19Z" level=debug msg="normalizing test case names" time="2023-11-13T08:49:19Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts" time="2023-11-13T08:49:19Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace" time="2023-11-13T08:49:19Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook" time="2023-11-13T08:49:19Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment"
+------------------------------+--------+
| NAME                         | RESULT |
+------------------------------+--------+
| sidecar_artifacts            | passed |
| sidecar_sidecar_namespace    | passed |
| sidecar_sidecar_skip_webhook | passed |
| sidecar_sidecar_deployment   | passed |
+------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message'
/logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/sidecar.xml + '[' 0 -gt 0 ']' + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true + '[' 3 -ne 3 ']' + test_suite_name=streaming + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/streaming.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-streaming make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ SKIP_ES_EXTERNAL=true \ ./tests/e2e/streaming/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 60m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 60m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/streaming/render.sh ++ export SUITE_DIR=./tests/e2e/streaming ++ SUITE_DIR=./tests/e2e/streaming ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
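
Note: the "version_le 0.32.0 0.25.0" trace above shows how render.sh decides KAFKA_USE_CUSTOM_PODSET: it version-sorts the two arguments with sort -V and checks whether the first one is the minimum. Reconstructed from the xtrace alone (a sketch of the helper, not the repo source):

    # version_le A B: true when A <= B under version-sort ordering.
    version_le() {
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }
    # 0.32.0 is not <= 0.25.0, so the caller presumably falls through to:
    KAFKA_USE_CUSTOM_PODSET=true  # newer Strimzi manages brokers via StrimziPodSets
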
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml + render_smoke_test simple-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=simple-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + export JAEGER_NAME=simple-streaming + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-tls + '[' 1 -ne 1 ']' + test_name=streaming-with-tls + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-tls' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m' Rendering files for test streaming-with-tls + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple + '[' streaming-simple '!=' _build ']' + cd .. 
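
Note: the step numbers in the render traces here are not zero-padded consistently: expr 00 + 1 prints a bare 1, so the Kafka-cluster assert lands in ./1-assert.yaml next to ./00-assert.yaml and ./02-assert.yaml. kuttl's step-file regexp ^(\d+)- accepts both forms and orders steps numerically, so the run is unaffected. If padding mattered, something like this would produce it (a hardening sketch, not what render.sh does):

    # Zero-pad a computed step number; 10# forces base 10 so values like
    # "08"/"09" are not parsed as invalid octal.
    step="$(printf '%02d' "$((10#00 + 1))")"   # -> "01"
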
+ mkdir -p streaming-with-tls + cd streaming-with-tls + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + render_smoke_test tls-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=tls-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + export JAEGER_NAME=tls-streaming + JAEGER_NAME=tls-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. 
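
Note: the yq eval -s calls in the render traces above split the multi-document tests/elasticsearch.yml into one file per YAML document, named by document index, before patching only the StatefulSet document; the install step later in this log applies elasticsearch_0.yml (the StatefulSet) and elasticsearch_1.yml (the Service) separately. In isolation, with the paths from this log:

    # Split: document 0 (StatefulSet) -> elasticsearch_0.yml,
    #        document 1 (Service)     -> elasticsearch_1.yml
    yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml
    # Patch the StatefulSet so its pods run as the privileged service account
    # the test step creates (oc create sa deploy-elasticsearch):
    yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml
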
+ mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./4-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + render_install_tracegen auto-provisioned 06 + '[' 2 -ne 2 ']' + jaeger=auto-provisioned + step=06 + replicas=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/tracegen.yaml -o ./06-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=1 ./06-install.yaml + sed -i s~simple-prod~auto-provisioned~gi ./06-install.yaml + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-tracegen.yaml.template -o ./06-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd
tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-894131460 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-simple === PAUSE kuttl/harness/streaming-simple === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === RUN kuttl/harness/streaming-with-tls === PAUSE kuttl/harness/streaming-with-tls === CONT kuttl/harness/artifacts logger.go:42: 08:49:32 | artifacts | Creating namespace: kuttl-test-funny-slug logger.go:42: 08:49:32 | artifacts | artifacts events from ns kuttl-test-funny-slug: logger.go:42: 08:49:32 | artifacts | Deleting namespace: kuttl-test-funny-slug === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-top-haddock logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 08:49:38 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 08:49:44 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 08:49:44 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 08:49:44 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 08:49:48 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 08:49:48 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 08:50:05 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 1-install logger.go:42: 
08:50:05 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install logger.go:42: 08:50:06 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-top-haddock/auto-provisioned created logger.go:42: 08:50:06 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install logger.go:42: 08:50:06 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3- logger.go:42: 08:50:45 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3- logger.go:42: 08:50:45 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4- logger.go:42: 08:51:21 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4- logger.go:42: 08:51:21 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5- logger.go:42: 08:51:44 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5- logger.go:42: 08:51:44 | streaming-with-autoprovisioning-autoscale/6-install | starting test step 6-install logger.go:42: 08:51:44 | streaming-with-autoprovisioning-autoscale/6-install | Deployment:kuttl-test-top-haddock/tracegen created logger.go:42: 08:51:50 | streaming-with-autoprovisioning-autoscale/6-install | test step completed 6-install logger.go:42: 08:51:50 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7- logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale/7- | test step failed 7- case.go:364: failed in step 7- case.go:366: --- Deployment:kuttl-test-top-haddock/auto-provisioned-ingester +++ Deployment:kuttl-test-top-haddock/auto-provisioned-ingester @@ -1,8 +1,320 @@ apiVersion: apps/v1 kind: Deployment metadata: + labels: + app: jaeger + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: auto-provisioned + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: auto-provisioned-ingester + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"3cb296d2-1f3a-4c61-bfbf-26eb4c71bf68"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-ingester"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":14270,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + 
f:timeoutSeconds: {} + f:resources: + .: {} + f:requests: + .: {} + f:memory: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/var/run/secrets/auto-provisioned"}: + .: {} + f:mountPath: {} + f:name: {} + k:{"mountPath":"/var/run/secrets/auto-provisioned-cluster-ca"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"auto-provisioned-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"kafkauser-auto-provisioned"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"kafkauser-auto-provisioned-cluster-ca"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: Go-http-client + operation: Update + time: "2023-11-13T08:51:45Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:deployment.kubernetes.io/revision: {} + f:status: + f:availableReplicas: {} + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:readyReplicas: {} + f:replicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2023-11-13T08:51:47Z" name: auto-provisioned-ingester namespace: kuttl-test-top-haddock + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: auto-provisioned + uid: 3cb296d2-1f3a-4c61-bfbf-26eb4c71bf68 +spec: + progressDeadlineSeconds: 600 + replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: auto-provisioned + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: auto-provisioned-ingester + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14270" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: auto-provisioned + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: auto-provisioned-ingester + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --es.server-urls=http://elasticsearch:9200 + - --kafka.consumer.authentication=tls + - --kafka.consumer.brokers=auto-provisioned-kafka-bootstrap.kuttl-test-top-haddock.svc.cluster.local:9093 + - --kafka.consumer.tls.ca=/var/run/secrets/auto-provisioned-cluster-ca/ca.crt + - --kafka.consumer.tls.cert=/var/run/secrets/auto-provisioned/user.crt + - --kafka.consumer.tls.enabled=true + - --kafka.consumer.tls.key=/var/run/secrets/auto-provisioned/user.key + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + image: 
registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14270 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-ingester + ports: + - containerPort: 14270 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14270 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: + requests: + memory: 500m + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /var/run/secrets/auto-provisioned + name: kafkauser-auto-provisioned + - mountPath: /var/run/secrets/auto-provisioned-cluster-ca + name: kafkauser-auto-provisioned-cluster-ca + - mountPath: /etc/pki/ca-trust/extracted/pem + name: auto-provisioned-trusted-ca + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: auto-provisioned + serviceAccountName: auto-provisioned + terminationGracePeriodSeconds: 30 + volumes: + - name: kafkauser-auto-provisioned + secret: + defaultMode: 420 + secretName: auto-provisioned + - name: kafkauser-auto-provisioned-cluster-ca + secret: + defaultMode: 420 + secretName: auto-provisioned-cluster-ca-cert + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: auto-provisioned-trusted-ca + name: auto-provisioned-trusted-ca status: - readyReplicas: 2 + availableReplicas: 1 + conditions: + - lastTransitionTime: "2023-11-13T08:51:47Z" + lastUpdateTime: "2023-11-13T08:51:47Z" + message: Deployment has minimum availability. + reason: MinimumReplicasAvailable + status: "True" + type: Available + - lastTransitionTime: "2023-11-13T08:51:45Z" + lastUpdateTime: "2023-11-13T08:51:47Z" + message: ReplicaSet "auto-provisioned-ingester-6d8bdff6d7" has successfully progressed. 
+ reason: NewReplicaSetAvailable + status: "True" + type: Progressing + observedGeneration: 1 + readyReplicas: 1 + replicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-top-haddock/auto-provisioned-ingester: .status.readyReplicas: value mismatch, expected: 2 != actual: 1 logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-top-haddock: logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:49:45 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-top-haddock/elasticsearch-0 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:49:45 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.131.0.113/23] from ovn-kubernetes logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:49:45 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:49:45 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:49:53 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 8.439s (8.439s including waiting) kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:49:53 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:49:53 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:00 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.131.0.113:9200/": dial tcp 10.131.0.113:9200: connect: connection refused kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:11 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:11 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:11 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:11 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-top-haddock/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_b3413021-d802-4087-93bb-eb7ae36e5561 logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:14 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-235010dd-2499-42bc-a879-c14f3c773074 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_b3413021-d802-4087-93bb-eb7ae36e5561 logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:15 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-top-haddock/auto-provisioned-zookeeper-0 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:17 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-235010dd-2499-42bc-a879-c14f3c773074" attachdetach-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:25 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.129.2.88/23] from ovn-kubernetes logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:25 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:25 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:25 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:46 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:47 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:47 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:47 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-top-haddock/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_b3413021-d802-4087-93bb-eb7ae36e5561 logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:50 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-de165579-9e33-4694-b64b-e04886286945 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bf58cbfc9-shhxm_b3413021-d802-4087-93bb-eb7ae36e5561 logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:51 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-top-haddock/auto-provisioned-kafka-0 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:50:53 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-de165579-9e33-4694-b64b-e04886286945" attachdetach-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:00 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.131.0.114/23] from ovn-kubernetes logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:00 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:00 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:00 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v Binding Scheduled Successfully assigned kuttl-test-top-haddock/auto-provisioned-entity-operator-77f477bc49-rvw2v to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v AddedInterface Add eth0 [10.131.0.115/23] from ovn-kubernetes logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod 
auto-provisioned-entity-operator-77f477bc49-rvw2v.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-77f477bc49 SuccessfulCreate Created pod: auto-provisioned-entity-operator-77f477bc49-rvw2v replicaset-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:22 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-77f477bc49 to 1 deployment-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:23 +0000 UTC Normal Pod auto-provisioned-entity-operator-77f477bc49-rvw2v.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:44 +0000 UTC Normal Pod tracegen-7656c8ff8-2mqmj Binding Scheduled Successfully assigned kuttl-test-top-haddock/tracegen-7656c8ff8-2mqmj to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:44 +0000 UTC Warning Pod tracegen-7656c8ff8-2mqmj FailedMount MountVolume.SetUp failed for volume "auto-provisioned-service-ca" : configmap "auto-provisioned-service-ca" not found kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:44 +0000 UTC Warning Pod tracegen-7656c8ff8-2mqmj FailedMount MountVolume.SetUp failed for volume "auto-provisioned-trusted-ca" : configmap "auto-provisioned-trusted-ca" not found kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:44 +0000 UTC Normal ReplicaSet.apps tracegen-7656c8ff8 SuccessfulCreate Created pod: tracegen-7656c8ff8-2mqmj replicaset-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:44 +0000 UTC Normal Deployment.apps tracegen 
ScalingReplicaSet Scaled up replica set tracegen-7656c8ff8 to 1 deployment-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Normal Pod auto-provisioned-collector-59b8cbf848-8c9qh Binding Scheduled Successfully assigned kuttl-test-top-haddock/auto-provisioned-collector-59b8cbf848-8c9qh to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-59b8cbf848 SuccessfulCreate Created pod: auto-provisioned-collector-59b8cbf848-8c9qh replicaset-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-59b8cbf848 to 1 deployment-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Normal Pod auto-provisioned-ingester-6d8bdff6d7-8cmtd Binding Scheduled Successfully assigned kuttl-test-top-haddock/auto-provisioned-ingester-6d8bdff6d7-8cmtd to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-6d8bdff6d7 SuccessfulCreate Created pod: auto-provisioned-ingester-6d8bdff6d7-8cmtd replicaset-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-6d8bdff6d7 to 1 deployment-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x Binding Scheduled Successfully assigned kuttl-test-top-haddock/auto-provisioned-query-7c55d88485-n9k9x to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Warning Pod auto-provisioned-query-7c55d88485-n9k9x FailedMount MountVolume.SetUp failed for volume "auto-provisioned-ui-oauth-proxy-tls" : secret "auto-provisioned-ui-oauth-proxy-tls" not found kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-7c55d88485 SuccessfulCreate Created pod: auto-provisioned-query-7c55d88485-n9k9x replicaset-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:45 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-7c55d88485 to 1 deployment-controller logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:46 +0000 UTC Warning Pod auto-provisioned-collector-59b8cbf848-8c9qh FailedMount MountVolume.SetUp failed for volume "auto-provisioned-sampling-configuration-volume" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:46 +0000 UTC Normal Pod auto-provisioned-ingester-6d8bdff6d7-8cmtd AddedInterface Add eth0 [10.131.0.116/23] from ovn-kubernetes logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:46 +0000 UTC Normal Pod 
auto-provisioned-ingester-6d8bdff6d7-8cmtd.spec.containers{jaeger-ingester} Pulled Container image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:46 +0000 UTC Normal Pod auto-provisioned-ingester-6d8bdff6d7-8cmtd.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:46 +0000 UTC Normal Pod auto-provisioned-ingester-6d8bdff6d7-8cmtd.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:46 +0000 UTC Normal Pod tracegen-7656c8ff8-2mqmj AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:46 +0000 UTC Normal Pod tracegen-7656c8ff8-2mqmj.spec.containers{tracegen} Pulling Pulling image "jaegertracing/jaeger-tracegen:1.49.0" kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-collector-59b8cbf848-8c9qh AddedInterface Add eth0 [10.129.2.89/23] from ovn-kubernetes logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-collector-59b8cbf848-8c9qh.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-collector-59b8cbf848-8c9qh.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-collector-59b8cbf848-8c9qh.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x AddedInterface Add eth0 [10.129.2.90/23] from ovn-kubernetes logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x.spec.containers{oauth-proxy} Pulled Container image 
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:47 +0000 UTC Normal Pod auto-provisioned-query-7c55d88485-n9k9x.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:48 +0000 UTC Normal Pod tracegen-7656c8ff8-2mqmj.spec.containers{tracegen} Pulled Successfully pulled image "jaegertracing/jaeger-tracegen:1.49.0" in 2.134s (2.134s including waiting) kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:48 +0000 UTC Normal Pod tracegen-7656c8ff8-2mqmj.spec.containers{tracegen} Created Created container tracegen kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:48 +0000 UTC Normal Pod tracegen-7656c8ff8-2mqmj.spec.containers{tracegen} Started Started container tracegen kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:48 +0000 UTC Normal Pod tracegen-7656c8ff8-2mqmj.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:48 +0000 UTC Normal Pod tracegen-7656c8ff8-2mqmj.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:51:48 +0000 UTC Normal Pod tracegen-7656c8ff8-2mqmj.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler 
logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod auto-provisioned-collector-59b8cbf848-8c9qh horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:52:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (1 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:57:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector 
FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod auto-provisioned-collector-59b8cbf848-8c9qh horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | 2023-11-13 08:57:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-ingester of Pod auto-provisioned-ingester-6d8bdff6d7-8cmtd horizontal-pod-autoscaler logger.go:42: 08:58:50 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-top-haddock === CONT kuttl/harness/streaming-with-tls logger.go:42: 08:59:30 | streaming-with-tls | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:59:30 | streaming-with-tls | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:59:30 | streaming-with-tls | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:59:30 | streaming-with-tls | Creating namespace: kuttl-test-splendid-liger logger.go:42: 08:59:30 | streaming-with-tls/0-install | starting test step 0-install logger.go:42: 08:59:30 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 08:59:30 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 08:59:31 | streaming-with-tls/0-install | >>>> Skiping kafka-operator undeploy logger.go:42: 08:59:31 | streaming-with-tls/0-install | kubectl delete --namespace kuttl-test-splendid-liger -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 08:59:31 | streaming-with-tls/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist logger.go:42: 08:59:31 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 08:59:31 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 08:59:31 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 08:59:31 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-splendid-liger logger.go:42: 08:59:31 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-splendid-liger 2>&1 | grep -v "already exists" || true logger.go:42: 08:59:31 | streaming-with-tls/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 08:59:31 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-splendid-liger logger.go:42: 08:59:31 | streaming-with-tls/0-install | mkdir -p tests/_build/ logger.go:42: 08:59:31 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-splendid-liger 2>&1 | grep -v "already exists" || true logger.go:42: 08:59:31 | streaming-with-tls/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 08:59:31 | streaming-with-tls/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 08:59:31 | streaming-with-tls/0-install | Dload Upload Total Spent Left Speed logger.go:42: 08:59:31 | 
streaming-with-tls/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 6828 0 --:--:-- --:--:-- --:--:-- 6865 logger.go:42: 08:59:31 | streaming-with-tls/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 08:59:31 | streaming-with-tls/0-install | kubectl -n kuttl-test-splendid-liger apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 08:59:31 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 08:59:31 | streaming-with-tls/0-install | kubectl -n kuttl-test-splendid-liger apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 08:59:32 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 08:59:32 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 09:06:32 | streaming-with-tls/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 09:06:32 | streaming-with-tls | streaming-with-tls events from ns kuttl-test-splendid-liger: logger.go:42: 09:06:32 | streaming-with-tls | Deleting namespace: kuttl-test-splendid-liger === CONT kuttl/harness/streaming-simple logger.go:42: 09:06:38 | streaming-simple | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:06:38 | streaming-simple | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:06:38 | streaming-simple | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:06:38 | streaming-simple | Creating namespace: kuttl-test-equipped-elk logger.go:42: 09:06:38 | streaming-simple/0-install | starting test step 0-install logger.go:42: 09:06:38 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 09:06:38 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 09:06:38 | streaming-simple/0-install | >>>> Skiping kafka-operator undeploy logger.go:42: 09:06:38 | streaming-simple/0-install | kubectl delete --namespace kuttl-test-equipped-elk -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 09:06:38 | streaming-simple/0-install | Error from server (NotFound): error when deleting "tests/_build/kafka-example.yaml": kafkas.kafka.strimzi.io "my-cluster" not found logger.go:42: 09:06:38 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 09:06:38 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 09:06:38 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 09:06:38 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-equipped-elk logger.go:42: 09:06:38 | streaming-simple/0-install | kubectl create namespace kuttl-test-equipped-elk 2>&1 | grep -v "already exists" || true logger.go:42: 09:06:38 | streaming-simple/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 09:06:38 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-equipped-elk logger.go:42: 09:06:38 | streaming-simple/0-install | mkdir -p 
tests/_build/ logger.go:42: 09:06:38 | streaming-simple/0-install | kubectl create namespace kuttl-test-equipped-elk 2>&1 | grep -v "already exists" || true logger.go:42: 09:06:39 | streaming-simple/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 09:06:39 | streaming-simple/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 09:06:39 | streaming-simple/0-install | Dload Upload Total Spent Left Speed logger.go:42: 09:06:39 | streaming-simple/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 6999 0 --:--:-- --:--:-- --:--:-- 7032 logger.go:42: 09:06:39 | streaming-simple/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 09:06:39 | streaming-simple/0-install | kubectl -n kuttl-test-equipped-elk apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 09:06:39 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 09:06:39 | streaming-simple/0-install | kubectl -n kuttl-test-equipped-elk apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 09:06:39 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 09:06:39 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 09:13:40 | streaming-simple/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 09:13:40 | streaming-simple | streaming-simple events from ns kuttl-test-equipped-elk: logger.go:42: 09:13:40 | streaming-simple | Deleting namespace: kuttl-test-equipped-elk === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- FAIL: kuttl (1454.14s) --- FAIL: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.90s) --- FAIL: kuttl/harness/streaming-with-autoprovisioning-autoscale (592.68s) --- FAIL: kuttl/harness/streaming-with-tls (427.74s) --- FAIL: kuttl/harness/streaming-simple (427.65s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml time="2023-11-13T09:13:46Z" level=debug msg="Setting a new name for the test suites" time="2023-11-13T09:13:46Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-11-13T09:13:46Z" level=debug msg="normalizing test case names" time="2023-11-13T09:13:46Z" level=debug msg="streaming/artifacts -> streaming_artifacts" time="2023-11-13T09:13:46Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale" time="2023-11-13T09:13:46Z" level=debug msg="streaming/streaming-with-tls -> streaming_streaming_with_tls" time="2023-11-13T09:13:46Z" level=debug msg="streaming/streaming-simple -> streaming_streaming_simple" +-----------------------------------------------------+--------+ | NAME | RESULT | +-----------------------------------------------------+--------+ | streaming_artifacts | passed | | streaming_streaming_with_autoprovisioning_autoscale | failed | | streaming_streaming_with_tls | failed | | streaming_streaming_simple | failed | 
+-----------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/sidecar.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/streaming.xml + '[' 3 -gt 0 ']' + count=2 + '[' 2 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true + '[' 3 -ne 3 ']' + test_suite_name=ui + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/ui.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-ui make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 85m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-08-062604 True False 85m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/ui/render.sh ++ export SUITE_DIR=./tests/e2e/ui ++ SUITE_DIR=./tests/e2e/ui ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/ui ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test allinone + '[' 1 -ne 1 ']' + test_name=allinone + echo =========================================================================== =========================================================================== + info 'Rendering files for test allinone' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test allinone\e[0m' Rendering files for test allinone + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build + '[' _build '!=' _build ']' + mkdir -p allinone + cd allinone + export GET_URL_COMMAND + export URL + export JAEGER_NAME=all-in-one-ui + JAEGER_NAME=all-in-one-ui + '[' true = true ']' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml + start_test production + '[' 1 -ne 1 ']' + test_name=production + echo =========================================================================== =========================================================================== + info 'Rendering files for test production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test production\e[0m' Rendering files for 
test production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone + '[' allinone '!=' _build ']' + cd .. + mkdir -p production + cd production + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + [[ true = true ]] + [[ true = true ]] + render_install_jaeger production-ui production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + '[' true = true ']' + INSECURE=true + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml + INSECURE=true + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml + ASSERT_PRESENT=false + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running ui E2E tests' Running ui E2E tests + cd tests/e2e/ui/_build + set +e + KUBECONFIG=/tmp/kubeconfig-894131460 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 3 tests === RUN kuttl/harness === RUN kuttl/harness/allinone === PAUSE kuttl/harness/allinone === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/production === PAUSE kuttl/harness/production === CONT kuttl/harness/allinone logger.go:42: 09:13:53 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:13:53 | allinone | Creating namespace: kuttl-test-saved-honeybee logger.go:42: 09:13:53 | allinone/0-install | starting test step 0-install logger.go:42: 09:13:53 | allinone/0-install | Jaeger:kuttl-test-saved-honeybee/all-in-one-ui created logger.go:42: 09:13:57 | allinone/0-install | test step completed 0-install logger.go:42: 09:13:57 | allinone/1-curl | starting test step 1-curl logger.go:42: 09:13:57 | allinone/1-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 09:13:57 | allinone/1-curl | Checking the Ingress host value was populated logger.go:42: 09:13:57 | allinone/1-curl | Try number 0 logger.go:42: 09:13:57 | allinone/1-curl | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 09:13:57 | allinone/1-curl | template was: logger.go:42: 09:13:57 | allinone/1-curl | {.items[0].status.ingress[0].host} logger.go:42: 09:13:57 | allinone/1-curl | object given to jsonpath engine was: logger.go:42: 09:13:57 | allinone/1-curl | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 09:13:57 | allinone/1-curl | logger.go:42: 09:13:57 | allinone/1-curl | logger.go:42: 09:14:07 | allinone/1-curl | Try number 1 logger.go:42: 09:14:07 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-saved-honeybee.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:14:07 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui] logger.go:42: 09:14:07 | allinone/1-curl | Checking an expected HTTP response logger.go:42: 09:14:07 | allinone/1-curl | Running in OpenShift logger.go:42: 09:14:07 | allinone/1-curl | User not provided. Getting the token... logger.go:42: 09:14:09 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 09:14:15 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-saved-honeybee.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:14:15 | allinone/1-curl | Something failed while trying to contact the server.
Trying insecure mode logger.go:42: 09:14:15 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-saved-honeybee.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:14:15 | allinone/1-curl | curl response asserted properly logger.go:42: 09:14:15 | allinone/1-curl | test step completed 1-curl logger.go:42: 09:14:15 | allinone/2-delete | starting test step 2-delete logger.go:42: 09:14:15 | allinone/2-delete | Jaeger:kuttl-test-saved-honeybee/all-in-one-ui created logger.go:42: 09:14:15 | allinone/2-delete | test step completed 2-delete logger.go:42: 09:14:15 | allinone/3-install | starting test step 3-install logger.go:42: 09:14:15 | allinone/3-install | Jaeger:kuttl-test-saved-honeybee/all-in-one-ui updated logger.go:42: 09:14:15 | allinone/3-install | test step completed 3-install logger.go:42: 09:14:15 | allinone/4-test-ui-config | starting test step 4-test-ui-config logger.go:42: 09:14:15 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh] logger.go:42: 09:14:15 | allinone/4-test-ui-config | Checking the Ingress host value was populated logger.go:42: 09:14:15 | allinone/4-test-ui-config | Try number 0 logger.go:42: 09:14:15 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 09:14:15 | allinone/4-test-ui-config | template was: logger.go:42: 09:14:15 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host} logger.go:42: 09:14:15 | allinone/4-test-ui-config | object given to jsonpath engine was: logger.go:42: 09:14:15 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 09:14:15 | allinone/4-test-ui-config | logger.go:42: 09:14:15 | allinone/4-test-ui-config | logger.go:42: 09:14:25 | allinone/4-test-ui-config | Try number 1 logger.go:42: 09:14:25 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-saved-honeybee.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:14:25 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 09:14:26 | allinone/4-test-ui-config | time="2023-11-13T09:14:26Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-saved-honeybee.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search..." logger.go:42: 09:14:26 | allinone/4-test-ui-config | time="2023-11-13T09:14:26Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 09:14:26 | allinone/4-test-ui-config | time="2023-11-13T09:14:26Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-saved-honeybee.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search" logger.go:42: 09:14:26 | allinone/4-test-ui-config | time="2023-11-13T09:14:26Z" level=info msg="Doing request number 0" logger.go:42: 09:14:26 | allinone/4-test-ui-config | time="2023-11-13T09:14:26Z" level=info msg="Content found and asserted!" logger.go:42: 09:14:26 | allinone/4-test-ui-config | time="2023-11-13T09:14:26Z" level=info msg="Success!"
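Editor's note: the "array index out of bounds" error followed by "Try number 1" above is the expected pattern from ensure-ingress-host.sh, because an OpenShift Route object exists before its .status.ingress is populated. A rough bash equivalent of that loop (a sketch, not the rendered template; the 10 s interval matches the timestamps in this log):

# Poll until the first Route in the namespace reports an ingress host.
host=""
until [ -n "$host" ]; do
  host=$(kubectl get routes -n "$NAMESPACE" \
    -o jsonpath='{.items[0].status.ingress[0].host}' 2>/dev/null) || true
  [ -n "$host" ] || sleep 10
done
echo "Hostname is $host"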
logger.go:42: 09:14:26 | allinone/4-test-ui-config | test step completed 4-test-ui-config logger.go:42: 09:14:26 | allinone | allinone events from ns kuttl-test-saved-honeybee: logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:56 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg Binding Scheduled Successfully assigned kuttl-test-saved-honeybee/all-in-one-ui-74985c654-whdxg to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:56 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg AddedInterface Add eth0 [10.129.2.91/23] from ovn-kubernetes logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:56 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:56 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-74985c654 SuccessfulCreate Created pod: all-in-one-ui-74985c654-whdxg replicaset-controller logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:56 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-74985c654 to 1 deployment-controller logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:57 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:57 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:57 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:57 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:13:57 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:10 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:10 +0000 UTC Normal Pod all-in-one-ui-74985c654-whdxg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:10 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-74985c654 SuccessfulDelete Deleted pod: all-in-one-ui-74985c654-whdxg replicaset-controller logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:10 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d Binding Scheduled Successfully assigned kuttl-test-saved-honeybee/all-in-one-ui-797bdbdffd-twt8d to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:10 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-797bdbdffd SuccessfulCreate Created pod: all-in-one-ui-797bdbdffd-twt8d replicaset-controller logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:10 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-74985c654 to 0 from 1 deployment-controller 
logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:10 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-797bdbdffd to 1 deployment-controller logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:11 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d AddedInterface Add eth0 [10.131.0.117/23] from ovn-kubernetes logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:11 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:11 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:11 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:11 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:11 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:11 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:15 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:15 +0000 UTC Normal Pod all-in-one-ui-797bdbdffd-twt8d.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:19 +0000 UTC Normal Pod all-in-one-ui-569db74695-ngtzg Binding Scheduled Successfully assigned kuttl-test-saved-honeybee/all-in-one-ui-569db74695-ngtzg to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:19 +0000 UTC Normal Pod all-in-one-ui-569db74695-ngtzg AddedInterface Add eth0 [10.131.0.118/23] from ovn-kubernetes logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:19 +0000 UTC Normal Pod all-in-one-ui-569db74695-ngtzg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:19 +0000 UTC Normal Pod all-in-one-ui-569db74695-ngtzg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:19 +0000 UTC Normal Pod all-in-one-ui-569db74695-ngtzg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:19 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-569db74695 SuccessfulCreate Created pod: all-in-one-ui-569db74695-ngtzg replicaset-controller logger.go:42: 09:14:26 | allinone | 2023-11-13 09:14:19 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-569db74695 to 1 deployment-controller 
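Editor's note: the "Something failed while trying to contact the server. Trying insecure mode" / "Try number n/30" lines above come from assert-jaeger-http-code.sh polling the route for an expected status code. A hedged sketch of that loop, mirroring the messages in this log (the real script also handles OpenShift token authentication, which is omitted here):

# Usage sketch: assert-http-code <url> <expected_code>
url="$1"; expected="$2"; insecure=""
for try in $(seq 1 30); do
  echo "Try number $try/30 the $url"
  # curl prints the HTTP status code; a connection/TLS failure yields 000.
  code=$(curl -s -o /dev/null -w '%{http_code}' $insecure "$url" 2>/dev/null) || code=000
  if [ "$code" = "$expected" ]; then
    echo "curl response asserted properly"; exit 0
  elif [ "$code" = "000" ] && [ -z "$insecure" ]; then
    echo "Something failed while trying to contact the server. Trying insecure mode"
    insecure="-k"
  else
    echo "HTTP response is $code. $expected expected. Waiting 10 s"
    sleep 10
  fi
done
exit 1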
logger.go:42: 09:14:26 | allinone | Deleting namespace: kuttl-test-saved-honeybee === CONT kuttl/harness/production logger.go:42: 09:14:32 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:14:32 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:14:32 | production | Creating namespace: kuttl-test-bright-cod logger.go:42: 09:14:32 | production/1-install | starting test step 1-install logger.go:42: 09:14:32 | production/1-install | Jaeger:kuttl-test-bright-cod/production-ui created logger.go:42: 09:15:07 | production/1-install | test step completed 1-install logger.go:42: 09:15:07 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access logger.go:42: 09:15:07 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh] logger.go:42: 09:15:07 | production/2-check-forbbiden-access | Checking the Ingress host value was populated logger.go:42: 09:15:07 | production/2-check-forbbiden-access | Try number 0 logger.go:42: 09:15:07 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:15:07 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui] logger.go:42: 09:15:08 | production/2-check-forbbiden-access | Checking an expected HTTP response logger.go:42: 09:15:08 | production/2-check-forbbiden-access | Running in OpenShift logger.go:42: 09:15:08 | production/2-check-forbbiden-access | Not using any secret logger.go:42: 09:15:08 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:15:08 | production/2-check-forbbiden-access | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 09:15:08 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:15:08 | production/2-check-forbbiden-access | HTTP response is 503. 403 expected. 
Waiting 10 s logger.go:42: 09:15:18 | production/2-check-forbbiden-access | Try number 3/30 the https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:15:18 | production/2-check-forbbiden-access | curl response asserted properly logger.go:42: 09:15:18 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access logger.go:42: 09:15:18 | production/3-curl | starting test step 3-curl logger.go:42: 09:15:18 | production/3-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 09:15:18 | production/3-curl | Checking the Ingress host value was populated logger.go:42: 09:15:18 | production/3-curl | Try number 0 logger.go:42: 09:15:18 | production/3-curl | Hostname is production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:15:18 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 09:15:18 | production/3-curl | Checking an expected HTTP response logger.go:42: 09:15:18 | production/3-curl | Running in OpenShift logger.go:42: 09:15:18 | production/3-curl | User not provided. Getting the token... logger.go:42: 09:15:19 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 09:15:26 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:15:26 | production/3-curl | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 09:15:26 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:15:26 | production/3-curl | curl response asserted properly logger.go:42: 09:15:26 | production/3-curl | test step completed 3-curl logger.go:42: 09:15:26 | production/4-install | starting test step 4-install logger.go:42: 09:15:26 | production/4-install | Jaeger:kuttl-test-bright-cod/production-ui updated logger.go:42: 09:15:26 | production/4-install | test step completed 4-install logger.go:42: 09:15:26 | production/5-check-disabled-security | starting test step 5-check-disabled-security logger.go:42: 09:15:26 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh] logger.go:42: 09:15:26 | production/5-check-disabled-security | Checking the Ingress host value was populated logger.go:42: 09:15:26 | production/5-check-disabled-security | Try number 0 logger.go:42: 09:15:26 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:15:26 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 09:15:26 | production/5-check-disabled-security | Checking an expected HTTP response logger.go:42: 09:15:26 | production/5-check-disabled-security | Running in OpenShift logger.go:42: 09:15:26 | production/5-check-disabled-security | Not using any secret logger.go:42: 09:15:26 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:15:26 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 09:15:26 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:15:26 | production/5-check-disabled-security | HTTP response is 403. 200 expected. 
Waiting 10 s logger.go:42: 09:15:36 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:15:36 | production/5-check-disabled-security | curl response asserted properly logger.go:42: 09:15:36 | production/5-check-disabled-security | test step completed 5-check-disabled-security logger.go:42: 09:15:36 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID logger.go:42: 09:15:36 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 09:15:36 | production/6-check-NO-gaID | Checking the Ingress host value was populated logger.go:42: 09:15:36 | production/6-check-NO-gaID | Try number 0 logger.go:42: 09:15:36 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:15:36 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 09:15:37 | production/6-check-NO-gaID | time="2023-11-13T09:15:37Z" level=info msg="Querying https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search..." logger.go:42: 09:15:37 | production/6-check-NO-gaID | time="2023-11-13T09:15:37Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 09:15:37 | production/6-check-NO-gaID | time="2023-11-13T09:15:37Z" level=info msg="Polling to https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search" logger.go:42: 09:15:37 | production/6-check-NO-gaID | time="2023-11-13T09:15:37Z" level=info msg="Doing request number 0" logger.go:42: 09:15:37 | production/6-check-NO-gaID | time="2023-11-13T09:15:37Z" level=info msg="Content not found and asserted it was not found!" logger.go:42: 09:15:37 | production/6-check-NO-gaID | time="2023-11-13T09:15:37Z" level=info msg="Success!" 
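Editor's note: the 6-check-NO-gaID step above drives cmd-utils/uiconfig/main.go with ASSERT_PRESENT=false, i.e. it polls the UI and passes only when EXPECTED_CONTENT is absent from the served page. A minimal curl/grep sketch of the same check (assumption: the tracking ID appears verbatim in the served UI bundle):

# Assert MyTrackingId is NOT served by the UI (mirrors ASSERT_PRESENT=false).
host=$(kubectl get routes -n "$NAMESPACE" \
  -o jsonpath='{.items[0].status.ingress[0].host}')
if curl -ks "https://$host/search" | grep -q 'MyTrackingId'; then
  echo "Content found but asserted absent" >&2
  exit 1
fi
echo "Content not found and asserted it was not found!"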
logger.go:42: 09:15:37 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID logger.go:42: 09:15:37 | production/7-add-tracking-id | starting test step 7-add-tracking-id logger.go:42: 09:15:37 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE] logger.go:42: 09:15:37 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured logger.go:42: 09:15:37 | production/7-add-tracking-id | test step completed 7-add-tracking-id logger.go:42: 09:15:37 | production/8-check-gaID | starting test step 8-check-gaID logger.go:42: 09:15:37 | production/8-check-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 09:15:37 | production/8-check-gaID | Checking the Ingress host value was populated logger.go:42: 09:15:37 | production/8-check-gaID | Try number 0 logger.go:42: 09:15:37 | production/8-check-gaID | Hostname is production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:15:37 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 09:15:38 | production/8-check-gaID | time="2023-11-13T09:15:38Z" level=info msg="Querying https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search..." logger.go:42: 09:15:38 | production/8-check-gaID | time="2023-11-13T09:15:38Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 09:15:38 | production/8-check-gaID | time="2023-11-13T09:15:38Z" level=info msg="Polling to https://production-ui-kuttl-test-bright-cod.apps.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com/search" logger.go:42: 09:15:38 | production/8-check-gaID | time="2023-11-13T09:15:38Z" level=info msg="Doing request number 0" logger.go:42: 09:15:38 | production/8-check-gaID | time="2023-11-13T09:15:38Z" level=warning msg="Found: false . Assert: true" logger.go:42: 09:15:38 | production/8-check-gaID | time="2023-11-13T09:15:38Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 09:15:38 | production/8-check-gaID | time="2023-11-13T09:15:38Z" level=info msg="Doing request number 1" logger.go:42: 09:15:38 | production/8-check-gaID | time="2023-11-13T09:15:38Z" level=warning msg="Found: false . Assert: true" logger.go:42: 09:15:38 | production/8-check-gaID | time="2023-11-13T09:15:38Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 09:15:46 | production/8-check-gaID | time="2023-11-13T09:15:46Z" level=info msg="Doing request number 2" logger.go:42: 09:15:46 | production/8-check-gaID | time="2023-11-13T09:15:46Z" level=info msg="Content found and asserted!" logger.go:42: 09:15:46 | production/8-check-gaID | time="2023-11-13T09:15:46Z" level=info msg="Success!" 
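Editor's note: the add-tracking-id.yaml applied in step 7 is not reproduced in this log. Judging by the MyTrackingId assertions before and after it, a plausible shape is a Jaeger CR carrying the UI tracking option through the jaegertracing.io/v1 ui.options pass-through; treat the exact contents below as a hypothetical reconstruction:

# Hypothetical equivalent of add-tracking-id.yaml (the real file presumably
# carries the full production-ui spec, not just the ui stanza).
kubectl apply -n "$NAMESPACE" -f - <<'EOF'
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: production-ui
spec:
  ui:
    options:
      tracking:
        gaID: MyTrackingId
EOF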
logger.go:42: 09:15:46 | production/8-check-gaID | test step completed 8-check-gaID logger.go:42: 09:15:46 | production | production events from ns kuttl-test-bright-cod: logger.go:42: 09:15:46 | production | 2023-11-13 09:14:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl Binding Scheduled Successfully assigned kuttl-test-bright-cod/elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl to ip-10-0-91-77.us-east-2.compute.internal default-scheduler logger.go:42: 09:15:46 | production | 2023-11-13 09:14:38 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cbf6 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl replicaset-controller logger.go:42: 09:15:46 | production | 2023-11-13 09:14:38 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestbrightcodproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cbf6 to 1 deployment-controller logger.go:42: 09:15:46 | production | 2023-11-13 09:14:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl AddedInterface Add eth0 [10.128.2.56/23] from ovn-kubernetes logger.go:42: 09:15:46 | production | 2023-11-13 09:14:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:14:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:14:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:14:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:14:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:14:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:14:49 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:14:54 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbrightcodproductionui-1-78c84cb72jxl.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:15:04 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-k5gq7 Binding Scheduled Successfully assigned 
kuttl-test-bright-cod/production-ui-collector-5b7c4bd9bb-k5gq7 to ip-10-0-25-202.us-east-2.compute.internal default-scheduler logger.go:42: 09:15:46 | production | 2023-11-13 09:15:04 +0000 UTC Normal ReplicaSet.apps production-ui-collector-5b7c4bd9bb SuccessfulCreate Created pod: production-ui-collector-5b7c4bd9bb-k5gq7 replicaset-controller logger.go:42: 09:15:46 | production | 2023-11-13 09:15:04 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-5b7c4bd9bb to 1 deployment-controller logger.go:42: 09:15:46 | production | 2023-11-13 09:15:04 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml Binding Scheduled Successfully assigned kuttl-test-bright-cod/production-ui-query-696c4874bb-49dml to ip-10-0-72-163.us-east-2.compute.internal default-scheduler logger.go:42: 09:15:46 | production | 2023-11-13 09:15:04 +0000 UTC Normal ReplicaSet.apps production-ui-query-696c4874bb SuccessfulCreate Created pod: production-ui-query-696c4874bb-49dml replicaset-controller logger.go:42: 09:15:46 | production | 2023-11-13 09:15:04 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-696c4874bb to 1 deployment-controller logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-k5gq7 AddedInterface Add eth0 [10.129.2.92/23] from ovn-kubernetes logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-k5gq7.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-k5gq7.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-k5gq7.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml AddedInterface Add eth0 [10.131.0.119/23] from ovn-kubernetes logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{oauth-proxy} Created 
Created container oauth-proxy kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:05 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:21 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:21 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:21 +0000 UTC Normal Pod production-ui-query-696c4874bb-49dml.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:21 +0000 UTC Normal ReplicaSet.apps production-ui-query-696c4874bb SuccessfulDelete Deleted pod: production-ui-query-696c4874bb-49dml replicaset-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:21 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l Binding Scheduled Successfully assigned kuttl-test-bright-cod/production-ui-query-7766b7f659-nl52l to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:21 +0000 UTC Normal ReplicaSet.apps production-ui-query-7766b7f659 SuccessfulCreate Created pod: production-ui-query-7766b7f659-nl52l replicaset-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:21 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-696c4874bb to 0 from 1 deployment-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:21 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-7766b7f659 to 1 deployment-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l AddedInterface Add eth0 [10.131.0.120/23] from ovn-kubernetes
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:22 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:30 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:30 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:30 +0000 UTC Normal Pod production-ui-query-7766b7f659-nl52l.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:30 +0000 UTC Normal ReplicaSet.apps production-ui-query-7766b7f659 SuccessfulDelete Deleted pod: production-ui-query-7766b7f659-nl52l replicaset-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:30 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-7766b7f659 to 0 from 1 deployment-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:31 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2 Binding Scheduled Successfully assigned kuttl-test-bright-cod/production-ui-query-6ffb6df884-9b4g2 to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:31 +0000 UTC Normal ReplicaSet.apps production-ui-query-6ffb6df884 SuccessfulCreate Created pod: production-ui-query-6ffb6df884-9b4g2 replicaset-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:31 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-6ffb6df884 to 1 deployment-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:32 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2 AddedInterface Add eth0 [10.131.0.121/23] from ovn-kubernetes
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:32 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:32 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:32 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:32 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:32 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:32 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:38 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:38 +0000 UTC Normal Pod production-ui-query-6ffb6df884-9b4g2.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:38 +0000 UTC Normal ReplicaSet.apps production-ui-query-6ffb6df884 SuccessfulDelete Deleted pod: production-ui-query-6ffb6df884-9b4g2 replicaset-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:38 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-6ffb6df884 to 0 from 1 deployment-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:39 +0000 UTC Normal Pod production-ui-query-649bf86b8c-k5lcg Binding Scheduled Successfully assigned kuttl-test-bright-cod/production-ui-query-649bf86b8c-k5lcg to ip-10-0-72-163.us-east-2.compute.internal default-scheduler
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:39 +0000 UTC Normal ReplicaSet.apps production-ui-query-649bf86b8c SuccessfulCreate Created pod: production-ui-query-649bf86b8c-k5lcg replicaset-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:39 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-649bf86b8c to 1 deployment-controller
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:40 +0000 UTC Normal Pod production-ui-query-649bf86b8c-k5lcg AddedInterface Add eth0 [10.131.0.122/23] from ovn-kubernetes
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:40 +0000 UTC Normal Pod production-ui-query-649bf86b8c-k5lcg.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:40 +0000 UTC Normal Pod production-ui-query-649bf86b8c-k5lcg.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:40 +0000 UTC Normal Pod production-ui-query-649bf86b8c-k5lcg.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:40 +0000 UTC Normal Pod production-ui-query-649bf86b8c-k5lcg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:40 +0000 UTC Normal Pod production-ui-query-649bf86b8c-k5lcg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | 2023-11-13 09:15:40 +0000 UTC Normal Pod production-ui-query-649bf86b8c-k5lcg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:15:46 | production | Deleting namespace: kuttl-test-bright-cod
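Note on the events above: the three query-pod rollovers (696c4874bb -> 7766b7f659 -> 6ffb6df884 -> 649bf86b8c) are the expected effect of the UI test steps updating the Jaeger CR; each spec change makes the operator regenerate the production-ui-query Deployment, and the deployment-controller swaps in a new ReplicaSet. The earlier HorizontalPodAutoscaler warnings are likewise typical right after the collector starts, before the resource metrics API has samples for the new pods. A minimal way to inspect both conditions by hand while the test namespace still exists (it is deleted at the end of the test; the namespace name is taken from this run):

    kubectl rollout status deployment/production-ui-query -n kuttl-test-bright-cod
    kubectl describe hpa production-ui-collector -n kuttl-test-bright-cod
    kubectl get events -n kuttl-test-bright-cod --sort-by=.lastTimestamp

These are standard kubectl invocations for ad-hoc debugging, not part of the test harness itself.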
=== CONT kuttl/harness/artifacts
logger.go:42: 09:15:52 | artifacts | Creating namespace: kuttl-test-useful-vervet
logger.go:42: 09:15:52 | artifacts | artifacts events from ns kuttl-test-useful-vervet:
logger.go:42: 09:15:52 | artifacts | Deleting namespace: kuttl-test-useful-vervet
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (125.45s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/allinone (39.66s)
        --- PASS: kuttl/harness/production (79.82s)
        --- PASS: kuttl/harness/artifacts (5.81s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml
time="2023-11-13T09:15:58Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-13T09:15:58Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-13T09:15:58Z" level=debug msg="normalizing test case names"
time="2023-11-13T09:15:58Z" level=debug msg="ui/allinone -> ui_allinone"
time="2023-11-13T09:15:58Z" level=debug msg="ui/production -> ui_production"
time="2023-11-13T09:15:58Z" level=debug msg="ui/artifacts -> ui_artifacts"
+---------------+--------+
|     NAME      | RESULT |
+---------------+--------+
|  ui_allinone  | passed |
| ui_production | passed |
| ui_artifacts  | passed |
+---------------+--------+
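As the debug lines show, junitcli rewrites the raw kuttl JUnit report before it is archived: it renames the test suite, removes the synthetic 'artifacts' test case, and flattens test names (ui/allinone -> ui_allinone) so the results can be aggregated in CI dashboards. The equivalent standalone invocation, with the same flags as in the trace (the output path here is illustrative; CI writes to /logs/artifacts):

    go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
    junitcli --suite-name ui --report --output ./ui.xml ./artifacts/kuttl-report.xml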
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/ui.xml
+ '[' 0 -gt 0 ']'
+ '[' 2 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
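The trace above is the suite-level gate: for every JUnit file under $ARTIFACT_DIR it counts occurrences of 'failure message', increments a counter once per suite that contains any failure (examples and streaming in this run), and fails the job only when more than 3 suites have failures. A condensed re-implementation of the logic visible in the trace (variable names taken from the log; the threshold of 3 is what the '[' 2 -gt 3 ']' test implies; grep -c prints 0 and returns a non-zero status when nothing matches, which is why the printed count, not grep's exit code, is tested):

    count=0
    for file in "$ARTIFACT_DIR"/*; do
        failures=$(grep -c 'failure message' "$file")
        if [ "$failures" -gt 0 ]; then
            count=$((count + 1))
        fi
    done
    if [ "$count" -gt 3 ]; then
        exit 1
    fi
    exit 0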
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=upgrade
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/upgrade.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-upgrade
make[2]: Entering directory '/tmp/jaeger-tests'
make docker JAEGER_VERSION=1.49.1 IMG="quay.io//jaeger-operator:next"
make[3]: Entering directory '/tmp/jaeger-tests'
[ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.49.0" --build-arg=JAEGER_VERSION=1.49.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2023-11-13T09:15:58Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" .
make[3]: Leaving directory '/tmp/jaeger-tests'
touch build-e2e-upgrade-image
SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.49.0" JAEGER_OPERATOR_VERSION="1.49.0" JAEGER_VERSION="1.49.0" ./tests/e2e/upgrade/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-11-08-062604 True False 87m Cluster version is 4.15.0-0.nightly-2023-11-08-062604'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-11-08-062604 True False 87m Cluster version is 4.15.0-0.nightly-2023-11-08-062604' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/upgrade/render.sh
++ export SUITE_DIR=./tests/e2e/upgrade
++ SUITE_DIR=./tests/e2e/upgrade
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/upgrade
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
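For context: render.sh decides between vanilla Kubernetes and OpenShift templates by probing for the OpenShift-only ClusterVersion resource, which is what the kubectl get clusterversion call above does; any non-empty output flips IS_OPENSHIFT to true. A minimal sketch of that probe, condensed from the trace rather than quoted from the script:

    output=$(kubectl get clusterversion 2>/dev/null)
    IS_OPENSHIFT=false
    if [ ! -z "$output" ]; then
        echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
        IS_OPENSHIFT=true
    fi
    export IS_OPENSHIFT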
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ export JAEGER_NAME
+ '[' true = true ']'
+ skip_test upgrade 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade
+ warning 'upgrade: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m'
WAR: upgrade: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test upgrade-from-latest-release 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade-from-latest-release
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade-from-latest-release
+ warning 'upgrade-from-latest-release: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m'
WAR: upgrade-from-latest-release: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
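skip_test, as traced above, is how the harness prunes rendered test cases that cannot run on the current platform: it deletes the test's directory from _build and prints a warning, so kuttl never sees the case. This is why the upgrade run below contains only the artifacts test. A sketch of the helper as reconstructed from the trace (the argument-count and working-directory checks visible in the trace are shortened here):

    skip_test() {
        test_name=$1
        message=$2
        rm -rf "$test_name"
        echo -e "\e[1;33mWAR: ${test_name}: ${message}\e[0m"
    }

    skip_test upgrade 'Test not supported in OpenShift'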
+ echo 'Running upgrade E2E tests'
Running upgrade E2E tests
+ cd tests/e2e/upgrade/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-894131460
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-tdvkiyh8-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 1 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT kuttl/harness/artifacts
logger.go:42: 09:15:59 | artifacts | Creating namespace: kuttl-test-meet-swine
logger.go:42: 09:16:00 | artifacts | artifacts events from ns kuttl-test-meet-swine:
logger.go:42: 09:16:00 | artifacts | Deleting namespace: kuttl-test-meet-swine
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (5.83s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.68s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2023-11-13T09:16:05Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-13T09:16:05Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-13T09:16:05Z" level=debug msg="normalizing test case names"
time="2023-11-13T09:16:05Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/ui.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/upgrade.xml
+ '[' 0 -gt 0 ']'
+ '[' 2 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
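To reproduce a single suite outside CI, the invocation at the top of each block is self-contained; for this run it was (the KUBECONFIG path is specific to this job):

    cd /tmp/jaeger-tests
    KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true

or, once a suite has been rendered, kuttl can be run directly from its _build directory, as the trace shows:

    cd tests/e2e/upgrade/_build
    /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml

The final exit 0 above confirms the overall gate passed: 2 of the 8 suite reports (examples and streaming) contained failures, which is within the allowed threshold of 3.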