Installing kuttl
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64
KUBECONFIG file is: /tmp/kubeconfig-2270117936
for suite in elasticsearch examples generate miscellaneous sidecar streaming ui upgrade; do \
make run-e2e-tests-$suite ; \
done
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=elasticsearch
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/elasticsearch.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-elasticsearch
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
SKIP_ES_EXTERNAL=true \
KAFKA_VERSION=0.32.0 \
SKIP_KAFKA=false \
./tests/e2e/elasticsearch/render.sh
+++ kubectl get clusterversion
++ output='NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.14.0-0.nightly-2023-09-24-044110   True        False         7m47s   Cluster version is 4.14.0-0.nightly-2023-09-24-044110'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.14.0-0.nightly-2023-09-24-044110   True        False         7m47s   Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 0.32.0 ']'
++ version_le 0.32.0 0.25.0
+++ echo 0.32.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 0.32.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/elasticsearch/render.sh
++ export SUITE_DIR=./tests/e2e/elasticsearch
++ SUITE_DIR=./tests/e2e/elasticsearch
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!'
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + start_test es-from-aio-to-production + '[' 1 -ne 1 ']' + test_name=es-from-aio-to-production + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-from-aio-to-production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m' Rendering files for test es-from-aio-to-production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-from-aio-to-production + cd es-from-aio-to-production + jaeger_name=my-jaeger + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 03 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=03 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i 
'.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml + render_smoke_test my-jaeger true 04 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test es-increasing-replicas + '[' 1 -ne 1 ']' + test_name=es-increasing-replicas + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-increasing-replicas' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m' Rendering files for test es-increasing-replicas + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production + '[' es-from-aio-to-production '!=' _build ']' + cd .. + mkdir -p es-increasing-replicas + cd es-increasing-replicas + jaeger_name=simple-prod + '[' true = true ']' + jaeger_deployment_mode=production_autoprovisioned + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml + cp ./01-assert.yaml ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml + render_smoke_test simple-prod true 03 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=03 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + 
JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + cp ./02-install.yaml ./04-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml + /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml + '[' true = true ']' + skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas + '[' es-increasing-replicas '!=' _build ']' + cd .. + rm -rf es-index-cleaner-upstream + warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_index_cleaner -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-index-cleaner-autoprov + '[' 1 -ne 1 ']' + test_name=es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-index-cleaner-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m' Rendering files for test es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-index-cleaner-autoprov + cd es-index-cleaner-autoprov + jaeger_name=test-es-index-cleaner-with-prefix + cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md . 
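Every test rendered above follows the same two-tool pattern: gomplate expands a template once into a numbered kuttl step file, and later steps are derived by copying that file and patching fields in place with yq (as in the es-increasing-replicas trace, where step 02 bumps the replica counts). A minimal sketch of the pattern using the same binaries the trace invokes; the template path and fields are taken from the trace, the surrounding script is illustrative:

# Render the base install manifest (step 01) from its gomplate template.
/tmp/jaeger-tests/bin/gomplate \
    -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template \
    -o ./01-install.yaml
# Derive step 02 by patching a copy of step 01 in place with yq.
cp ./01-install.yaml ./02-install.yaml
/tmp/jaeger-tests/bin/yq e -i '.spec.collector.replicas=2' ./02-install.yaml
/tmp/jaeger-tests/bin/yq e -i '.spec.query.replicas=2' ./02-install.yaml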
+ render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml + render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02 + '[' 6 -ne 6 ']' + jaeger=test-es-index-cleaner-with-prefix + is_secured=true + number_of_spans=5 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=5 + DAYS=5 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + sed 's~enabled: false~enabled: true~gi' ./01-install.yaml + CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml + /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=00 + test_step=06 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', 
'\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=test-es-index-cleaner-with-prefix-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.7.6 ++ version_ge 5.7.6 5.4 +++ echo 5.7.6 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.7.6 == 5.7.6 + '[' -n '' ']' + skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov + '[' es-index-cleaner-autoprov '!=' _build ']' + cd .. 
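The preceding block is the expansion of get_elasticsearch_openshift_operator_version followed by a version gate for the managed-ES variant of the test. Reconstructed from the trace (the helper bodies below are inferred from their expansions, not copied from the repo), it reads the operator version out of the pod's OLM properties annotation and compares version strings with sort -V:

# version_le X Y is true when X <= Y; version_ge X Y when X >= Y.
# sort -V orders version strings numerically, so the head of the sorted
# (or reverse-sorted) pair reveals the relation, as seen in the trace.
version_le() { test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"; }
version_ge() { test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" == "$1"; }

# Pull the installed elasticsearch-operator version out of the
# operatorframework.io/properties annotation, exactly as the trace does.
properties=$(kubectl get pods -l name=elasticsearch-operator --all-namespaces \
    '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}')
ESO_OPERATOR_VERSION=$(echo "$properties" | /tmp/jaeger-tests/bin/yq e -P \
    '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version')
version_ge "$ESO_OPERATOR_VERSION" 5.4 && echo "ESO >= 5.4"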
+ rm -rf es-index-cleaner-managed + warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + start_test es-multiinstance + '[' 1 -ne 1 ']' + test_name=es-multiinstance + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-multiinstance' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m' Rendering files for test es-multiinstance + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-multiinstance + cd es-multiinstance + jaeger_name=instance-1 + render_install_jaeger instance-1 production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=instance-1 + JAEGER_NAME=instance-1 + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml + '[' true = true ']' + skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-rollover-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance + '[' es-multiinstance '!=' _build ']' + cd .. 
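skip_test and warning, whose expansions bracket the skipped es-index-cleaner-upstream, es-index-cleaner-managed and es-rollover-upstream cases, can be reconstructed from the trace: the rendered directory of a skipped test is simply deleted so kuttl never discovers it, and a yellow WAR line records the reason. A sketch inferred from the traced behavior (argument-count checks abbreviated):

warning() {
    # Yellow "WAR:" line, matching the escape sequence in the trace.
    echo -e "\e[1;33mWAR: $1\e[0m"
}

skip_test() {
    test_name=$1
    message=$2
    # Step back up to _build if we are still inside a test folder,
    # then drop the skipped test's rendered files.
    [ "$(basename "$(pwd)")" != "_build" ] && cd ..
    rm -rf "$test_name"
    warning "$test_name: $message"
}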
+ rm -rf es-rollover-upstream + warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_rollover -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-rollover-autoprov + '[' 1 -ne 1 ']' + test_name=es-rollover-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-rollover-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m' Rendering files for test es-rollover-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-rollover-autoprov + cd es-rollover-autoprov + cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md . + jaeger_name=my-jaeger + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_report_spans my-jaeger true 2 00 true 02 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset 
JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=00 + test_step=03 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=01 + test_step=04 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=01 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml + JOB_NUMBER=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml + render_report_spans my-jaeger true 2 02 true 06 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=02 + ensure_reported_spans=true + test_step=06 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=02 + JOB_NUMBER=02 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export 
JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=02 + test_step=07 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=02 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml + JOB_NUMBER=02 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + job_number=03 + test_step=08 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=03 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml + JOB_NUMBER=03 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + job_number=04 + 
test_step=09 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=04 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml + JOB_NUMBER=04 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml + render_report_spans my-jaeger true 2 03 true 10 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=03 + ensure_reported_spans=true + test_step=10 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=03 + JOB_NUMBER=03 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + CRONJOB_NAME=my-jaeger-es-rollover + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'',' + job_number=05 + test_step=11 + escape_command ''\''--name'\'', '\''jaeger-span-000002'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-000002'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-000002'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=05 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml + JOB_NUMBER=05 + /tmp/jaeger-tests/bin/gomplate -f 
/tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + job_number=06 + test_step=12 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=06 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml + JOB_NUMBER=06 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.7.6 ++ version_ge 5.7.6 5.4 +++ echo 5.7.6 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.7.6 == 5.7.6 + '[' -n '' ']' + skip_test es-rollover-managed 'Test only supported with Elasticsearch OpenShift 
Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-rollover-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov + '[' es-rollover-autoprov '!=' _build ']' + cd .. + rm -rf es-rollover-managed + warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + skip_test es-spark-dependencies 'This test is not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=es-spark-dependencies + message='This test is not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + rm -rf es-spark-dependencies + warning 'es-spark-dependencies: This test is not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m' WAR: es-spark-dependencies: This test is not supported in OpenShift + [[ true = true ]] + [[ false = false ]] + start_test es-streaming-autoprovisioned + '[' 1 -ne 1 ']' + test_name=es-streaming-autoprovisioned + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-streaming-autoprovisioned' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-streaming-autoprovisioned\e[0m' Rendering files for test es-streaming-autoprovisioned + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-streaming-autoprovisioned + cd es-streaming-autoprovisioned + jaeger_name=auto-provisioned + render_assert_kafka true auto-provisioned 00 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=00 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + 
CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_smoke_test auto-provisioned true 04 + '[' 3 -ne 3 ']' + jaeger=auto-provisioned + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 + export JAEGER_NAME=auto-provisioned + JAEGER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running elasticsearch E2E tests' Running elasticsearch E2E tests + cd tests/e2e/elasticsearch/_build + set +e + KUBECONFIG=/tmp/kubeconfig-2270117936 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 8 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/es-from-aio-to-production === PAUSE kuttl/harness/es-from-aio-to-production === RUN kuttl/harness/es-increasing-replicas === PAUSE kuttl/harness/es-increasing-replicas === RUN kuttl/harness/es-index-cleaner-autoprov === PAUSE kuttl/harness/es-index-cleaner-autoprov === RUN kuttl/harness/es-multiinstance === PAUSE kuttl/harness/es-multiinstance === RUN kuttl/harness/es-rollover-autoprov === PAUSE kuttl/harness/es-rollover-autoprov === RUN kuttl/harness/es-simple-prod === PAUSE kuttl/harness/es-simple-prod === RUN kuttl/harness/es-streaming-autoprovisioned === PAUSE kuttl/harness/es-streaming-autoprovisioned === CONT kuttl/harness/artifacts logger.go:42: 06:49:13 | artifacts | Creating namespace: kuttl-test-valued-duck logger.go:42: 06:49:13 | artifacts | artifacts events from ns kuttl-test-valued-duck: logger.go:42: 06:49:13 | artifacts | Deleting namespace: kuttl-test-valued-duck === CONT kuttl/harness/es-multiinstance logger.go:42: 06:49:19 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 06:49:19 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 06:49:19 | es-multiinstance | Creating namespace: kuttl-test-direct-labrador logger.go:42: 06:49:19 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace logger.go:42: 06:49:19 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true] logger.go:42: 06:49:19 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace 
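render_assert_kafka above settles on replicas=1 because is_kafka_minimal_enabled returned 0; its traced expansion probes the namespaces where the operator may live and reads the KAFKA-PROVISIONING-MINIMAL environment variable of the jaeger-operator pod. A reconstruction from the trace (body inferred, not copied from the repo):

is_kafka_minimal_enabled() {
    namespaces=(observability openshift-operators openshift-distributed-tracing)
    for i in "${namespaces[@]}"; do
        # First container of the first jaeger-operator pod in namespace $i.
        enabled=$(kubectl get pods -n "$i" -l name=jaeger-operator -o yaml \
            | /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value')
        [ "$enabled" == true ] && return 0
    done
    return 1
}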
logger.go:42: 06:49:19 | es-multiinstance/1-install | starting test step 1-install logger.go:42: 06:49:19 | es-multiinstance/1-install | Jaeger:kuttl-test-direct-labrador/instance-1 created logger.go:42: 06:50:16 | es-multiinstance/1-install | test step completed 1-install logger.go:42: 06:50:16 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace logger.go:42: 06:50:16 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test] logger.go:42: 06:50:16 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created logger.go:42: 06:50:16 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace logger.go:42: 06:50:16 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance logger.go:42: 06:50:16 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test] logger.go:42: 06:50:17 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created logger.go:42: 06:50:17 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000] logger.go:42: 06:51:03 | es-multiinstance/3-create-second-instance | assert is valid logger.go:42: 06:51:03 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance logger.go:42: 06:51:03 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets logger.go:42: 06:51:03 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1] logger.go:42: 06:51:03 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2] logger.go:42: 06:51:03 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0] logger.go:42: 06:51:03 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets logger.go:42: 06:51:03 | es-multiinstance/5-delete | starting test step 5-delete logger.go:42: 06:51:03 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false] logger.go:42: 06:51:03 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted logger.go:42: 06:51:03 | es-multiinstance/5-delete | test step completed 5-delete logger.go:42: 06:51:03 | es-multiinstance | es-multiinstance events from ns kuttl-test-direct-labrador: logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:26 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc94f687 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz replicaset-controller logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz Binding Scheduled Successfully assigned kuttl-test-direct-labrador/elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:26 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestdirectlabradorinstance1-1 ScalingReplicaSet Scaled up replica set 
elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc94f687 to 1 deployment-controller
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz AddedInterface Add eth0 [10.129.2.21/23] from ovn-kubernetes
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{elasticsearch} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" in 5.112554134s (5.112616905s including waiting) kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" in 2.500521807s (2.500532407s including waiting) kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:42 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:47 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestdirectlabradorinstance1-1-5dbc9vfhmz.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:57 +0000 UTC Normal Deployment.apps instance-1-collector ScalingReplicaSet Scaled up replica set instance-1-collector-5dd4d98b8 to 1 deployment-controller
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-mswtj Binding Scheduled Successfully assigned kuttl-test-direct-labrador/instance-1-collector-5dd4d98b8-mswtj to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-mswtj AddedInterface Add eth0 [10.128.2.21/23] from ovn-kubernetes
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-mswtj.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Normal ReplicaSet.apps instance-1-collector-5dd4d98b8 SuccessfulCreate Created pod: instance-1-collector-5dd4d98b8-mswtj replicaset-controller
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs Binding Scheduled Successfully assigned kuttl-test-direct-labrador/instance-1-query-86c95b6cdd-dbfhs to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Warning Pod instance-1-query-86c95b6cdd-dbfhs FailedMount MountVolume.SetUp failed for volume "instance-1-ui-oauth-proxy-tls" : secret "instance-1-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs AddedInterface Add eth0 [10.131.0.21/23] from ovn-kubernetes
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Normal ReplicaSet.apps instance-1-query-86c95b6cdd SuccessfulCreate Created pod: instance-1-query-86c95b6cdd-dbfhs replicaset-controller
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:49:58 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-86c95b6cdd to 1 deployment-controller
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:02 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-mswtj.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" in 4.40686133s (4.40687844s including waiting) kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:02 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-mswtj.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:02 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-mswtj.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:11 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" in 12.905379778s (12.905390639s including waiting) kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:11 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:11 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:11 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:12 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:12 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:12 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:13 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" in 1.533668521s (1.533682361s including waiting) kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:13 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:13 +0000 UTC Normal Pod instance-1-query-86c95b6cdd-dbfhs.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:14 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:14 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:50:14 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:51:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:51:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod instance-1-collector-5dd4d98b8-mswtj horizontal-pod-autoscaler
logger.go:42: 06:51:03 | es-multiinstance | 2023-09-25 06:51:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 06:51:03 | es-multiinstance | Deleting namespace: kuttl-test-direct-labrador
=== CONT kuttl/harness/es-streaming-autoprovisioned
logger.go:42: 06:51:10 | es-streaming-autoprovisioned | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:51:10 | es-streaming-autoprovisioned | Creating namespace: kuttl-test-pleasing-corgi
logger.go:42: 06:51:10 | es-streaming-autoprovisioned/0-install | starting test step 0-install
logger.go:42: 06:51:10 | es-streaming-autoprovisioned/0-install | Jaeger:kuttl-test-pleasing-corgi/auto-provisioned created
logger.go:42: 06:52:25 | es-streaming-autoprovisioned/0-install | test step completed 0-install
logger.go:42: 06:52:25 | es-streaming-autoprovisioned/1- | starting test step 1-
logger.go:42: 06:52:54 | es-streaming-autoprovisioned/1- | test step completed 1-
logger.go:42: 06:52:54 | es-streaming-autoprovisioned/2- | starting test step 2-
logger.go:42: 06:53:26 | es-streaming-autoprovisioned/2- | test step completed 2-
logger.go:42: 06:53:26 | es-streaming-autoprovisioned/3- | starting test step 3-
logger.go:42: 06:53:35 | es-streaming-autoprovisioned/3- | test step completed 3-
logger.go:42: 06:53:35 | es-streaming-autoprovisioned/4-smoke-test | starting test step 4-smoke-test
logger.go:42: 06:53:35 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provisioned /dev/null]
logger.go:42: 06:53:38 | es-streaming-autoprovisioned/4-smoke-test | Warning: resource jaegers/auto-provisioned is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 06:53:45 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 06:53:46 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 06:53:46 | es-streaming-autoprovisioned/4-smoke-test | job.batch/report-span created
logger.go:42: 06:53:46 | es-streaming-autoprovisioned/4-smoke-test | job.batch/check-span created
logger.go:42: 06:53:59 | es-streaming-autoprovisioned/4-smoke-test | test step completed 4-smoke-test
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | es-streaming-autoprovisioned events from ns kuttl-test-pleasing-corgi:
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:17 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-6df4c6ddf SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd replicaset-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:17 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-6df4c6ddf to 1 deployment-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd AddedInterface Add eth0 [10.128.2.23/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:28 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:33 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpleasingcorgiautoprovisioned-1-l6lmd.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:46 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:47 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:47 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-pleasing-corgi/data-auto-provisioned-zookeeper-0"
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:47 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:50 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-564e8e5d-cdd3-4386-a740-77938192f65d
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:51 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/auto-provisioned-zookeeper-0 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:53 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-564e8e5d-cdd3-4386-a740-77938192f65d" attachdetach-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:54 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.131.0.24/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:51:54 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:02 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" in 7.507159849s (7.507169069s including waiting) kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:02 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:02 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:25 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:26 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:26 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:26 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-pleasing-corgi/data-0-auto-provisioned-kafka-0"
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:29 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-d2875097-e863-4fc6-87f0-40c7c6ccec07
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:30 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/auto-provisioned-kafka-0 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:32 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d2875097-e863-4fc6-87f0-40c7c6ccec07" attachdetach-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:33 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.131.0.25/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:33 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:33 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:33 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:55 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42 Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/auto-provisioned-entity-operator-75799b9d45-qdr42 to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:55 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42 AddedInterface Add eth0 [10.129.2.22/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:55 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:55 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:55 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:55 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:55 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-75799b9d45 SuccessfulCreate Created pod: auto-provisioned-entity-operator-75799b9d45-qdr42 replicaset-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:55 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-75799b9d45 to 1 deployment-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:56 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:56 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:52:56 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{tls-sidecar} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:05 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{tls-sidecar} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" in 8.943410818s (8.943423088s including waiting) kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:05 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:05 +0000 UTC Normal Pod auto-provisioned-entity-operator-75799b9d45-qdr42.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:28 +0000 UTC Normal Pod auto-provisioned-collector-7f4f555748-l8dhz Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/auto-provisioned-collector-7f4f555748-l8dhz to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:28 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-7f4f555748 SuccessfulCreate Created pod: auto-provisioned-collector-7f4f555748-l8dhz replicaset-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:28 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-7f4f555748 to 1 deployment-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:28 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-57c9f8756c to 1 deployment-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:28 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/auto-provisioned-query-7648c5c997-299hh to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:28 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-7648c5c997 SuccessfulCreate Created pod: auto-provisioned-query-7648c5c997-299hh replicaset-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:28 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-7648c5c997 to 1 deployment-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal Pod auto-provisioned-collector-7f4f555748-l8dhz AddedInterface Add eth0 [10.131.0.26/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal Pod auto-provisioned-collector-7f4f555748-l8dhz.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal Pod auto-provisioned-collector-7f4f555748-l8dhz.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal Pod auto-provisioned-collector-7f4f555748-l8dhz.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal Pod auto-provisioned-ingester-57c9f8756c-q75hb Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/auto-provisioned-ingester-57c9f8756c-q75hb to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal Pod auto-provisioned-ingester-57c9f8756c-q75hb AddedInterface Add eth0 [10.129.2.23/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal Pod auto-provisioned-ingester-57c9f8756c-q75hb.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-57c9f8756c SuccessfulCreate Created pod: auto-provisioned-ingester-57c9f8756c-q75hb replicaset-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Warning Pod auto-provisioned-query-7648c5c997-299hh FailedMount MountVolume.SetUp failed for volume "auto-provisioned-ui-oauth-proxy-tls" : secret "auto-provisioned-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:29 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:30 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:30 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:30 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:30 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:30 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:30 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:30 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:30 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:33 +0000 UTC Normal Pod auto-provisioned-ingester-57c9f8756c-q75hb.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" in 4.040721591s (4.040738692s including waiting) kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:33 +0000 UTC Normal Pod auto-provisioned-ingester-57c9f8756c-q75hb.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:33 +0000 UTC Normal Pod auto-provisioned-ingester-57c9f8756c-q75hb.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:39 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:39 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:39 +0000 UTC Normal Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:39 +0000 UTC Warning Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-query} Unhealthy Readiness probe failed: Get "http://10.131.0.27:16687/": dial tcp 10.131.0.27:16687: connect: connection refused kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:39 +0000 UTC Warning Pod auto-provisioned-query-7648c5c997-299hh.spec.containers{jaeger-agent} Unhealthy Readiness probe failed: Get "http://10.131.0.27:14271/": dial tcp 10.131.0.27:14271: connect: connection refused kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:39 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-7648c5c997 SuccessfulDelete Deleted pod: auto-provisioned-query-7648c5c997-299hh replicaset-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:39 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled down replica set auto-provisioned-query-7648c5c997 to 0 from 1 deployment-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6 Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/auto-provisioned-query-74d8bd6f5-mrhh6 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6 AddedInterface Add eth0 [10.131.0.28/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Pod auto-provisioned-query-74d8bd6f5-mrhh6.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-74d8bd6f5 SuccessfulCreate Created pod: auto-provisioned-query-74d8bd6f5-mrhh6 replicaset-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:40 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-74d8bd6f5 to 1 deployment-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:46 +0000 UTC Normal Pod check-span-mb9p8 Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/check-span-mb9p8 to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:46 +0000 UTC Normal Pod check-span-mb9p8 AddedInterface Add eth0 [10.129.2.25/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:46 +0000 UTC Normal Pod check-span-mb9p8.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:46 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-mb9p8 job-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:46 +0000 UTC Normal Pod report-span-hfntj Binding Scheduled Successfully assigned kuttl-test-pleasing-corgi/report-span-hfntj to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:46 +0000 UTC Normal Pod report-span-hfntj AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:46 +0000 UTC Normal Pod report-span-hfntj.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:46 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-hfntj job-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:48 +0000 UTC Normal Pod check-span-mb9p8.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" in 1.4590316s (1.4590377s including waiting) kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:48 +0000 UTC Normal Pod check-span-mb9p8.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:48 +0000 UTC Normal Pod check-span-mb9p8.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:48 +0000 UTC Normal Pod report-span-hfntj.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" in 1.458981299s (1.45899415s including waiting) kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:48 +0000 UTC Normal Pod report-span-hfntj.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:48 +0000 UTC Normal Pod report-span-hfntj.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | 2023-09-25 06:53:58 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 06:53:59 | es-streaming-autoprovisioned | Deleting namespace: kuttl-test-pleasing-corgi
=== CONT kuttl/harness/es-simple-prod
logger.go:42: 06:54:13 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:54:13 | es-simple-prod | Creating namespace: kuttl-test-hardy-anchovy
logger.go:42: 06:54:13 | es-simple-prod | es-simple-prod events from ns kuttl-test-hardy-anchovy:
logger.go:42: 06:54:13 | es-simple-prod | Deleting namespace: kuttl-test-hardy-anchovy
=== CONT kuttl/harness/es-rollover-autoprov
logger.go:42: 06:54:19 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:54:19 | es-rollover-autoprov | Creating namespace: kuttl-test-content-bear
logger.go:42: 06:54:19 | es-rollover-autoprov/1-install | starting test step 1-install
logger.go:42: 06:54:19 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-content-bear/my-jaeger created
logger.go:42: 06:54:54 | es-rollover-autoprov/1-install | test step completed 1-install
logger.go:42: 06:54:54 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 06:54:54 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 06:54:55 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 06:55:02 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml] logger.go:42: 06:55:02 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE] logger.go:42: 06:55:02 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created logger.go:42: 06:55:26 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans logger.go:42: 06:55:26 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices logger.go:42: 06:55:26 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-content-bear/00-check-indices created logger.go:42: 06:55:31 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices logger.go:42: 06:55:31 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices logger.go:42: 06:55:31 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-content-bear/01-check-indices created logger.go:42: 06:55:34 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices logger.go:42: 06:55:34 | es-rollover-autoprov/5-install | starting test step 5-install logger.go:42: 06:55:34 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-content-bear/my-jaeger updated logger.go:42: 06:55:55 | es-rollover-autoprov/5-install | test step completed 5-install logger.go:42: 06:55:55 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans logger.go:42: 06:55:55 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 06:56:02 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml] logger.go:42: 06:56:03 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE] logger.go:42: 06:56:03 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created logger.go:42: 06:56:27 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans logger.go:42: 06:56:27 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices logger.go:42: 06:56:27 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-content-bear/02-check-indices created logger.go:42: 06:56:31 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices logger.go:42: 06:56:31 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices logger.go:42: 06:56:31 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-content-bear/03-check-indices created logger.go:42: 06:56:35 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices logger.go:42: 06:56:35 | 
es-rollover-autoprov/9-check-indices | starting test step 9-check-indices logger.go:42: 06:56:35 | es-rollover-autoprov/9-check-indices | Job:kuttl-test-content-bear/04-check-indices created logger.go:42: 06:56:39 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices logger.go:42: 06:56:39 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans logger.go:42: 06:56:39 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 06:56:47 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml] logger.go:42: 06:56:47 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE] logger.go:42: 06:56:47 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created logger.go:42: 06:57:11 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans logger.go:42: 06:57:11 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices logger.go:42: 06:57:11 | es-rollover-autoprov/11-check-indices | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE] logger.go:42: 06:57:22 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:57:22Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists" logger.go:42: 06:57:22 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:57:22Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 06:57:22 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:57:22Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully" logger.go:42: 06:57:22 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:57:22Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob" logger.go:42: 06:57:22 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:57:22Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:57:22 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:57:22Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:57:32 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:57:32Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:57:42 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:57:42Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:57:52 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:57:52Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:58:02 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:58:02Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:58:12 | es-rollover-autoprov/11-check-indices | time="2023-09-25T06:58:12Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after 
my-jaeger-es-rollover 50.029534121s" logger.go:42: 06:58:12 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-content-bear/05-check-indices created logger.go:42: 06:58:16 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices logger.go:42: 06:58:16 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices logger.go:42: 06:58:16 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-content-bear/06-check-indices created logger.go:42: 06:58:20 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices logger.go:42: 06:58:20 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-content-bear: logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:25 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f65 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v replicaset-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v Binding Scheduled Successfully assigned kuttl-test-content-bear/elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v AddedInterface Add eth0 [10.128.2.24/23] from ovn-kubernetes logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:25 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcontentbearmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f65 to 1 deployment-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:35 +0000 UTC Warning Pod 
elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:40 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcontentbearmyjaeger-1-5bd7458f6f7n7v.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hctz8 Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-collector-558ccfc8dd-hctz8 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hctz8 AddedInterface Add eth0 [10.131.0.29/23] from ovn-kubernetes logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hctz8.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hctz8.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hctz8.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-hctz8 replicaset-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-query-768d7f8b7f-d8klh to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh AddedInterface Add eth0 [10.131.0.30/23] from ovn-kubernetes logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-768d7f8b7f SuccessfulCreate Created pod: my-jaeger-query-768d7f8b7f-d8klh replicaset-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:52 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-768d7f8b7f to 1 deployment-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:53 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:58:20 | 
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:53 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:53 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:53 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:53 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:53 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:53 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:53 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:58 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:58 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:58 +0000 UTC Normal Pod my-jaeger-query-768d7f8b7f-d8klh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:58 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-768d7f8b7f SuccessfulDelete Deleted pod: my-jaeger-query-768d7f8b7f-d8klh replicaset-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:58 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-768d7f8b7f to 0 from 1 deployment-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-query-6f44ffc5c9-2cc9b to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6f44ffc5c9 SuccessfulCreate Created pod: my-jaeger-query-6f44ffc5c9-2cc9b replicaset-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:54:59 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6f44ffc5c9 to 1 deployment-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:00 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:00 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:02 +0000 UTC Normal Pod 00-report-span-pw25w Binding Scheduled Successfully assigned kuttl-test-content-bear/00-report-span-pw25w to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:02 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-pw25w job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:03 +0000 UTC Normal Pod 00-report-span-pw25w AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:03 +0000 UTC Normal Pod 00-report-span-pw25w.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:03 +0000 UTC Normal Pod 00-report-span-pw25w.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:03 +0000 UTC Normal Pod 00-report-span-pw25w.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:26 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-82ftj job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:26 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:27 +0000 UTC Normal Pod 00-check-indices-82ftj Binding Scheduled Successfully assigned kuttl-test-content-bear/00-check-indices-82ftj to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:27 +0000 UTC Normal Pod 00-check-indices-82ftj AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:27 +0000 UTC Normal Pod 00-check-indices-82ftj.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:27 +0000 UTC Normal Pod 00-check-indices-82ftj.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:27 +0000 UTC Normal Pod 00-check-indices-82ftj.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:30 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:31 +0000 UTC Normal Pod 01-check-indices-s5ssn Binding Scheduled Successfully assigned kuttl-test-content-bear/01-check-indices-s5ssn to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:31 +0000 UTC Normal Pod 01-check-indices-s5ssn AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:31 +0000 UTC Normal Pod 01-check-indices-s5ssn.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:31 +0000 UTC Normal Pod 01-check-indices-s5ssn.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:31 +0000 UTC Normal Pod 01-check-indices-s5ssn.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:31 +0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-s5ssn job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:34 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:35 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ht9sx Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-es-rollover-create-mapping-ht9sx to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:35 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ht9sx AddedInterface Add eth0 [10.129.2.29/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:35 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ht9sx.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:35 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-ht9sx job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:52 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:52 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-558ccfc8dd-hctz8 horizontal-pod-autoscaler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:52 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:52 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ht9sx.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" in 16.625778898s (16.625790979s including waiting) kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:52 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ht9sx.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created container my-jaeger-es-rollover-create-mapping kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:52 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ht9sx.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet
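
The HorizontalPodAutoscaler warnings above are expected noise in this test: the collector HPA targets cpu and memory utilization, but utilization is usage divided by the container's resource request, and the jaeger-collector container here declares no memory request ("missing request for memory"), so there is nothing to divide by. A sketch of how one could quiet this by declaring requests through the Jaeger CR's spec.collector.resources field — the values are hypothetical:

    # Merge resource requests into the collector spec so the HPA can compute utilization.
    kubectl patch jaeger my-jaeger -n kuttl-test-content-bear --type=merge \
      -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'
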
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:55 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hctz8.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:55 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulDelete Deleted pod: my-jaeger-collector-558ccfc8dd-hctz8 replicaset-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:55 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-558ccfc8dd to 0 from 1 deployment-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:55 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:55 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:55 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:55 +0000 UTC Normal Pod my-jaeger-query-6f44ffc5c9-2cc9b.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:55 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6f44ffc5c9 SuccessfulDelete Deleted pod: my-jaeger-query-6f44ffc5c9-2cc9b replicaset-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:55 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-6f44ffc5c9 to 0 from 1 deployment-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:56 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-xrtr4 Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-collector-74dd5d98f7-xrtr4 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:56 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-74dd5d98f7 SuccessfulCreate Created pod: my-jaeger-collector-74dd5d98f7-xrtr4 replicaset-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:56 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-74dd5d98f7 to 1 deployment-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:56 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2 Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-query-cdddc5578-42sd2 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:56 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2 AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:56 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:56 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-cdddc5578 SuccessfulCreate Created pod: my-jaeger-query-cdddc5578-42sd2 replicaset-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:56 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-cdddc5578 to 1 deployment-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-xrtr4 AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-xrtr4.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-xrtr4.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-xrtr4.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:55:57 +0000 UTC Normal Pod my-jaeger-query-cdddc5578-42sd2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260416-ndhkg Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-es-lookback-28260416-ndhkg to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260416-ndhkg AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260416-ndhkg.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260416-ndhkg.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260416-ndhkg.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28260416 SuccessfulCreate Created pod: my-jaeger-es-lookback-28260416-ndhkg job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28260416 cronjob-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260416-87gsk Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-es-rollover-28260416-87gsk to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260416-87gsk AddedInterface Add eth0 [10.129.2.30/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260416-87gsk.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260416-87gsk.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260416-87gsk.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28260416 SuccessfulCreate Created pod: my-jaeger-es-rollover-28260416-87gsk job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28260416 cronjob-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:03 +0000 UTC Normal Pod 02-report-span-tqzrx Binding Scheduled Successfully assigned kuttl-test-content-bear/02-report-span-tqzrx to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:03 +0000 UTC Normal Pod 02-report-span-tqzrx AddedInterface Add eth0 [10.129.2.32/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:03 +0000 UTC Normal Pod 02-report-span-tqzrx.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
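
The -28260416 suffix on these Jobs is standard CronJob controller naming: each run is named <cronjob>-<scheduled time in minutes since the Unix epoch>, which is why the every-minute runs that follow are numbered 28260417 and 28260418. The arithmetic lines up with the timestamps above:

    date -u -d @$((28260416 * 60))   # Mon Sep 25 06:56:00 UTC 2023, the run created at 06:56:00
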
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:03 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-tqzrx job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28260416 Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28260416, status: Complete cronjob-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:04 +0000 UTC Normal Pod 02-report-span-tqzrx.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:04 +0000 UTC Normal Pod 02-report-span-tqzrx.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:04 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28260416 Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:04 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28260416, status: Complete cronjob-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:27 +0000 UTC Normal Pod 02-check-indices-lgzqc Binding Scheduled Successfully assigned kuttl-test-content-bear/02-check-indices-lgzqc to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:27 +0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-lgzqc job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:27 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:28 +0000 UTC Normal Pod 02-check-indices-lgzqc AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:28 +0000 UTC Normal Pod 02-check-indices-lgzqc.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:28 +0000 UTC Normal Pod 02-check-indices-lgzqc.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:28 +0000 UTC Normal Pod 02-check-indices-lgzqc.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:31 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:31 +0000 UTC Normal Pod 03-check-indices-xvfwp Binding Scheduled Successfully assigned kuttl-test-content-bear/03-check-indices-xvfwp to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:31 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-xvfwp job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:32 +0000 UTC Normal Pod 03-check-indices-xvfwp AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:32 +0000 UTC Normal Pod 03-check-indices-xvfwp.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:32 +0000 UTC Normal Pod 03-check-indices-xvfwp.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:32 +0000 UTC Normal Pod 03-check-indices-xvfwp.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:35 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:35 +0000 UTC Normal Pod 04-check-indices-vlw8c Binding Scheduled Successfully assigned kuttl-test-content-bear/04-check-indices-vlw8c to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:35 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-vlw8c job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:36 +0000 UTC Normal Pod 04-check-indices-vlw8c AddedInterface Add eth0 [10.129.2.35/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:36 +0000 UTC Normal Pod 04-check-indices-vlw8c.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:36 +0000 UTC Normal Pod 04-check-indices-vlw8c.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:36 +0000 UTC Normal Pod 04-check-indices-vlw8c.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:39 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:47 +0000 UTC Normal Pod 03-report-span-jjl7j Binding Scheduled Successfully assigned kuttl-test-content-bear/03-report-span-jjl7j to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:47 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-jjl7j job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:48 +0000 UTC Normal Pod 03-report-span-jjl7j AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:48 +0000 UTC Normal Pod 03-report-span-jjl7j.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:48 +0000 UTC Normal Pod 03-report-span-jjl7j.spec.containers{asserts-container} Created Created container asserts-container kubelet
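
Each numbered NN-check-indices Job is a step assertion: the pod queries Elasticsearch for the expected indices and exits zero on success, and the harness treats the Job's completion as the step passing. The equivalent manual wait — shown here against one of the jobs above as an illustration — would be:

    # Block until the assert Job reports Complete (kuttl does the equivalent per step).
    kubectl wait --for=condition=complete --timeout=120s job/04-check-indices -n kuttl-test-content-bear
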
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:48 +0000 UTC Normal Pod 03-report-span-jjl7j.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:56:52 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-74dd5d98f7-xrtr4 horizontal-pod-autoscaler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260417-szlkj Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-es-lookback-28260417-szlkj to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260417-szlkj AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260417-szlkj.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260417-szlkj.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260417-szlkj.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28260417 SuccessfulCreate Created pod: my-jaeger-es-lookback-28260417-szlkj job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28260417 cronjob-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260417-hmghz Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-es-rollover-28260417-hmghz to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260417-hmghz AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260417-hmghz.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260417-hmghz.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260417-hmghz.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28260417 SuccessfulCreate Created pod: my-jaeger-es-rollover-28260417-hmghz job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28260417 cronjob-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28260417 Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28260417, status: Complete cronjob-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28260417 Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28260417, status: Complete cronjob-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:57:11 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260418-s7k2m Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-es-lookback-28260418-s7k2m to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260418-s7k2m AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260418-s7k2m.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260418-s7k2m.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28260418-s7k2m.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28260418 SuccessfulCreate Created pod: my-jaeger-es-lookback-28260418-s7k2m job-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28260418 cronjob-controller
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260418-vhcf8 Binding Scheduled Successfully assigned kuttl-test-content-bear/my-jaeger-es-rollover-28260418-vhcf8 to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260418-vhcf8 AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes
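
After the one-off create-mapping Job finishes, the operator hands index maintenance to two per-instance CronJobs, my-jaeger-es-rollover and my-jaeger-es-lookback, whose minute-by-minute runs appear above. A quick way to inspect them, assuming the app.kubernetes.io/instance label the operator usually applies (a hypothetical selector, adjust to the actual labels):

    kubectl get cronjobs,jobs -n kuttl-test-content-bear -l app.kubernetes.io/instance=my-jaeger
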
"registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260418-vhcf8.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28260418-vhcf8.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28260418 SuccessfulCreate Created pod: my-jaeger-es-rollover-28260418-vhcf8 job-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28260418 cronjob-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28260418 Completed Job completed job-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28260418, status: Complete cronjob-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28260418 Completed Job completed job-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28260418, status: Complete cronjob-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:12 +0000 UTC Normal Pod 05-check-indices-hlxd9 Binding Scheduled Successfully assigned kuttl-test-content-bear/05-check-indices-hlxd9 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:12 +0000 UTC Normal Pod 05-check-indices-hlxd9 AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:12 +0000 UTC Normal Pod 05-check-indices-hlxd9.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:12 +0000 UTC Normal Pod 05-check-indices-hlxd9.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:12 +0000 UTC Normal Pod 05-check-indices-hlxd9.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:12 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-hlxd9 job-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:15 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:16 +0000 UTC Normal Pod 06-check-indices-bwl84 Binding Scheduled Successfully assigned kuttl-test-content-bear/06-check-indices-bwl84 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 06:58:20 | 
es-rollover-autoprov | 2023-09-25 06:58:16 +0000 UTC Normal Pod 06-check-indices-bwl84 AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:16 +0000 UTC Normal Pod 06-check-indices-bwl84.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:16 +0000 UTC Normal Pod 06-check-indices-bwl84.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:16 +0000 UTC Normal Pod 06-check-indices-bwl84.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:16 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-bwl84 job-controller logger.go:42: 06:58:20 | es-rollover-autoprov | 2023-09-25 06:58:19 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller logger.go:42: 06:58:20 | es-rollover-autoprov | Deleting namespace: kuttl-test-content-bear === CONT kuttl/harness/es-increasing-replicas logger.go:42: 06:58:27 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 06:58:27 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 06:58:27 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 06:58:27 | es-increasing-replicas | Creating namespace: kuttl-test-fit-boar logger.go:42: 06:58:27 | es-increasing-replicas/1-install | starting test step 1-install logger.go:42: 06:58:27 | es-increasing-replicas/1-install | Jaeger:kuttl-test-fit-boar/simple-prod created logger.go:42: 06:59:01 | es-increasing-replicas/1-install | test step completed 1-install logger.go:42: 06:59:01 | es-increasing-replicas/2-install | starting test step 2-install logger.go:42: 06:59:01 | es-increasing-replicas/2-install | Jaeger:kuttl-test-fit-boar/simple-prod updated logger.go:42: 06:59:10 | es-increasing-replicas/2-install | test step completed 2-install logger.go:42: 06:59:10 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test logger.go:42: 06:59:10 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 06:59:12 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
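
The Ignoring lines show kuttl's step discovery: only files matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ become numbered test steps, executed in ascending order, while helper scripts and templates in the same directory are skipped and must be invoked explicitly from a step. A hypothetical directory illustrating the convention:

    00-install.yaml          # applied as step 0
    00-assert.yaml           # expected state asserted for step 0 (kuttl's assert convention)
    check-es-nodes.sh        # no NN- prefix: ignored by discovery, run from a step's commands
    openshift-check-es-nodes.yaml.template   # rendered before the run, likewise ignored
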
logger.go:42: 06:59:18 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 06:59:18 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 06:59:19 | es-increasing-replicas/3-smoke-test | job.batch/report-span created
logger.go:42: 06:59:19 | es-increasing-replicas/3-smoke-test | job.batch/check-span created
logger.go:42: 06:59:31 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test
logger.go:42: 06:59:31 | es-increasing-replicas/4-install | starting test step 4-install
logger.go:42: 06:59:31 | es-increasing-replicas/4-install | Jaeger:kuttl-test-fit-boar/simple-prod updated
logger.go:42: 06:59:31 | es-increasing-replicas/4-install | test step completed 4-install
logger.go:42: 06:59:31 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes
logger.go:42: 06:59:31 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE]
logger.go:42: 06:59:31 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 06:59:31 | es-increasing-replicas/5-check-es-nodes | false
logger.go:42: 06:59:31 | es-increasing-replicas/5-check-es-nodes | Error: no matches found
logger.go:42: 06:59:36 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 06:59:36 | es-increasing-replicas/5-check-es-nodes | true
logger.go:42: 06:59:36 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes
logger.go:42: 06:59:36 | es-increasing-replicas | es-increasing-replicas events from ns kuttl-test-fit-boar:
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6 Binding Scheduled Successfully assigned kuttl-test-fit-boar/elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6 to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6 AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
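
The smoke test is a templated pair of Jobs: gomplate substitutes the assert image and the collector/query endpoints into smoke-test.yaml.template, kubectl applies the result, and report-span/check-span respectively write a trace and query it back. check-es-nodes.sh then appears to poll until the Elasticsearch resources report the expected node count, which is why its first probe logs false and Error: no matches found before succeeding on the next pass. Condensed from the commands logged above (only the final wait is a hypothetical addition; the harness itself asserts on the Jobs):

    ASSERT_IMG=<assert image> \
    JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 \
    MOUNT_SECRET=e2e-test \
      gomplate -f tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n $NAMESPACE
    kubectl wait --for=condition=complete --timeout=300s job/check-span -n $NAMESPACE
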
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6 replicaset-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:33 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestfitboarsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7 to 1 deployment-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:43 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:48 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-1-75f4f769c7-rx6c6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:58 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5499b86c46 to 1 deployment-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-ljvrl Binding Scheduled Successfully assigned kuttl-test-fit-boar/simple-prod-collector-5499b86c46-ljvrl to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-ljvrl AddedInterface Add eth0 [10.128.2.25/23] from ovn-kubernetes
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-ljvrl.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-ljvrl.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-ljvrl.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulCreate Created pod: simple-prod-collector-5499b86c46-ljvrl replicaset-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k Binding Scheduled Successfully assigned kuttl-test-fit-boar/simple-prod-query-7c775cfb68-hmt4k to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7c775cfb68 SuccessfulCreate Created pod: simple-prod-query-7c775cfb68-hmt4k replicaset-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:58:59 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7c775cfb68 to 1 deployment-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-zzw97 Binding Scheduled Successfully assigned kuttl-test-fit-boar/simple-prod-collector-5499b86c46-zzw97 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-zzw97 AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-zzw97.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-zzw97.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-zzw97.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulCreate Created pod: simple-prod-collector-5499b86c46-zzw97 replicaset-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5499b86c46 to 2 from 1 deployment-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf Binding Scheduled Successfully assigned kuttl-test-fit-boar/simple-prod-query-7c775cfb68-c9ndf to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf AddedInterface Add eth0 [10.128.2.26/23] from ovn-kubernetes
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7c775cfb68 SuccessfulCreate Created pod: simple-prod-query-7c775cfb68-c9ndf replicaset-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:05 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7c775cfb68 to 2 from 1 deployment-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:07 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" in 1.658656851s (1.658668802s including waiting) kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:07 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:07 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
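
The scale from 1 to 2 above is the effect of the test's 2-install update (its 06:59:01 step lines up with these events): raising the replica counts in the Jaeger CR makes the deployment controller surge both simple-prod-collector and simple-prod-query. A hypothetical direct equivalent of that update, expressed as a merge patch against the CR fields the operator exposes:

    kubectl patch jaeger simple-prod -n kuttl-test-fit-boar --type=merge \
      -p '{"spec":{"collector":{"replicas":2},"query":{"replicas":2}}}'
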
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:07 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:07 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:07 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:07 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:08 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" in 1.260049596s (1.260060126s including waiting) kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:08 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:08 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:13 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:13 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:13 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-c9ndf.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:13 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:13 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:13 +0000 UTC Normal Pod simple-prod-query-7c775cfb68-hmt4k.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7c775cfb68 SuccessfulDelete Deleted pod: simple-prod-query-7c775cfb68-c9ndf replicaset-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7c775cfb68 SuccessfulDelete Deleted pod: simple-prod-query-7c775cfb68-hmt4k replicaset-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:13 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-7c775cfb68 to 0 from 2 deployment-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs Binding Scheduled Successfully assigned kuttl-test-fit-boar/simple-prod-query-7cd6fbc8d8-7xlqs to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs AddedInterface Add eth0 [10.131.0.36/23] from ovn-kubernetes
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh Binding Scheduled Successfully assigned kuttl-test-fit-boar/simple-prod-query-7cd6fbc8d8-jqxwh to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh AddedInterface Add eth0 [10.128.2.27/23] from ovn-kubernetes
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7cd6fbc8d8 SuccessfulCreate Created pod: simple-prod-query-7cd6fbc8d8-7xlqs replicaset-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7cd6fbc8d8 SuccessfulCreate Created pod: simple-prod-query-7cd6fbc8d8-jqxwh replicaset-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:14 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7cd6fbc8d8 to 2 deployment-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:15 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:15 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:15 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:15 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:19 +0000 UTC Normal Pod check-span-9xfjm Binding Scheduled Successfully assigned kuttl-test-fit-boar/check-span-9xfjm to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:19 +0000 UTC Normal Pod check-span-9xfjm AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:19 +0000 UTC Normal Pod check-span-9xfjm.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" kubelet
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:19 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-9xfjm job-controller
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:19 +0000 UTC Normal Pod report-span-f8f8m Binding Scheduled Successfully assigned kuttl-test-fit-boar/report-span-f8f8m to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:19 +0000 UTC
Normal Pod report-span-f8f8m AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:19 +0000 UTC Normal Pod report-span-f8f8m.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:19 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-f8f8m job-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:20 +0000 UTC Normal Pod check-span-9xfjm.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" in 1.217889191s (1.217902411s including waiting) kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:20 +0000 UTC Normal Pod check-span-9xfjm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:20 +0000 UTC Normal Pod check-span-9xfjm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:20 +0000 UTC Normal Pod report-span-f8f8m.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" in 1.232622421s (1.232636562s including waiting) kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:20 +0000 UTC Normal Pod report-span-f8f8m.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:20 +0000 UTC Normal Pod report-span-f8f8m.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:31 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5 Binding Scheduled Successfully assigned kuttl-test-fit-boar/elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5 replicaset-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestfitboarsimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49 to 1 deployment-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-ljvrl.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-zzw97.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 
2023-09-25 06:59:32 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulDelete Deleted pod: simple-prod-collector-5499b86c46-zzw97 replicaset-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulDelete Deleted pod: simple-prod-collector-5499b86c46-ljvrl replicaset-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-5499b86c46 to 0 from 2 deployment-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-7xlqs.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Pod simple-prod-query-7cd6fbc8d8-jqxwh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7cd6fbc8d8 SuccessfulDelete Deleted pod: simple-prod-query-7cd6fbc8d8-7xlqs replicaset-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7cd6fbc8d8 SuccessfulDelete Deleted pod: simple-prod-query-7cd6fbc8d8-jqxwh replicaset-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-7cd6fbc8d8 to 0 from 2 deployment-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:32 +0000 UTC Warning Endpoints simple-prod-query FailedToUpdateEndpoint Failed to update endpoint kuttl-test-fit-boar/simple-prod-query: Operation cannot be fulfilled on endpoints "simple-prod-query": the object has been modified; please apply your changes to the latest version and try again endpoint-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5 AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 
06:59:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitboarsimpleprod-2-bdbd74b49-cfqc5.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-8pttd Binding Scheduled Successfully assigned kuttl-test-fit-boar/simple-prod-collector-55656dcb65-8pttd to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-8pttd AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-8pttd.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-8pttd.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-8pttd.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-lmgw9 Binding Scheduled Successfully assigned kuttl-test-fit-boar/simple-prod-collector-55656dcb65-lmgw9 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-55656dcb65 SuccessfulCreate Created pod: simple-prod-collector-55656dcb65-8pttd replicaset-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-55656dcb65 SuccessfulCreate Created pod: simple-prod-collector-55656dcb65-lmgw9 replicaset-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-55656dcb65 to 2 deployment-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46 Binding Scheduled 
Successfully assigned kuttl-test-fit-boar/simple-prod-query-cd687bc75-42m46 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46 AddedInterface Add eth0 [10.128.2.30/23] from ovn-kubernetes logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4 Binding Scheduled Successfully assigned kuttl-test-fit-boar/simple-prod-query-cd687bc75-gdfq4 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4 AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal ReplicaSet.apps simple-prod-query-cd687bc75 SuccessfulCreate Created pod: simple-prod-query-cd687bc75-gdfq4 replicaset-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal ReplicaSet.apps simple-prod-query-cd687bc75 SuccessfulCreate Created pod: simple-prod-query-cd687bc75-42m46 replicaset-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:33 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-cd687bc75 to 2 deployment-controller logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Warning Pod simple-prod-collector-55656dcb65-8pttd.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-lmgw9 AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-lmgw9.spec.containers{jaeger-collector} Pulling Pulling image 
"registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-42m46.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:59:36 | es-increasing-replicas | 2023-09-25 06:59:34 +0000 UTC Normal Pod simple-prod-query-cd687bc75-gdfq4.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:59:36 | es-increasing-replicas | Deleting namespace: kuttl-test-fit-boar === CONT kuttl/harness/es-index-cleaner-autoprov logger.go:42: 07:00:11 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:00:11 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-amazing-walleye logger.go:42: 07:00:11 | 
es-index-cleaner-autoprov/1-install | starting test step 1-install logger.go:42: 07:00:11 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-amazing-walleye/test-es-index-cleaner-with-prefix created logger.go:42: 07:00:48 | es-index-cleaner-autoprov/1-install | test step completed 1-install logger.go:42: 07:00:48 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans logger.go:42: 07:00:48 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE test-es-index-cleaner-with-prefix /dev/null] logger.go:42: 07:00:49 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:00:55 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml] logger.go:42: 07:00:55 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE] logger.go:42: 07:00:56 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created logger.go:42: 07:01:35 | es-index-cleaner-autoprov/2-report-spans | test step completed 2-report-spans logger.go:42: 07:01:35 | es-index-cleaner-autoprov/3-install | starting test step 3-install logger.go:42: 07:01:35 | es-index-cleaner-autoprov/3-install | Jaeger:kuttl-test-amazing-walleye/test-es-index-cleaner-with-prefix updated logger.go:42: 07:01:35 | es-index-cleaner-autoprov/3-install | test step completed 3-install logger.go:42: 07:01:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | starting test step 4-wait-es-index-cleaner logger.go:42: 07:01:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob test-es-index-cleaner-with-prefix-es-index-cleaner --namespace $NAMESPACE] logger.go:42: 07:01:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:36Z" level=debug msg="Checking if the test-es-index-cleaner-with-prefix-es-index-cleaner CronJob exists" logger.go:42: 07:01:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:36Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 07:01:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:36Z" level=warning msg="The BatchV1/Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner was not found" logger.go:42: 07:01:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:36Z" level=debug msg="Found BatchV/Cronjobs:" logger.go:42: 07:01:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:36Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 07:01:36 | 
es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:36Z" level=warning msg="The BatchV1/Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner was not found" logger.go:42: 07:01:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:36Z" level=debug msg="Found BatchV/Cronjobs:" logger.go:42: 07:01:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:46Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 07:01:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:46Z" level=info msg="Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner found successfully" logger.go:42: 07:01:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:46Z" level=debug msg="Waiting for the next scheduled job from test-es-index-cleaner-with-prefix-es-index-cleaner cronjob" logger.go:42: 07:01:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:46Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 07:01:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:46Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 07:01:56 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:01:56Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 07:02:06 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:02:06Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 07:02:16 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-09-25T07:02:16Z" level=info msg="Job of owner test-es-index-cleaner-with-prefix-es-index-cleaner succeeded after test-es-index-cleaner-with-prefix-es-index-cleaner 30.014732889s" logger.go:42: 07:02:16 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | test step completed 4-wait-es-index-cleaner logger.go:42: 07:02:16 | es-index-cleaner-autoprov/5-install | starting test step 5-install logger.go:42: 07:02:16 | es-index-cleaner-autoprov/5-install | Jaeger:kuttl-test-amazing-walleye/test-es-index-cleaner-with-prefix updated logger.go:42: 07:02:16 | es-index-cleaner-autoprov/5-install | test step completed 5-install logger.go:42: 07:02:16 | es-index-cleaner-autoprov/6-check-indices | starting test step 6-check-indices logger.go:42: 07:02:16 | es-index-cleaner-autoprov/6-check-indices | Job:kuttl-test-amazing-walleye/00-check-indices created logger.go:42: 07:02:20 | es-index-cleaner-autoprov/6-check-indices | test step completed 6-check-indices logger.go:42: 07:02:20 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-amazing-walleye: logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m Binding Scheduled Successfully assigned kuttl-test-amazing-walleye/elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 
+0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6d89cb6987 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m replicaset-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:17 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6d89cb6987 to 1 deployment-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:27 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestamazingwalleyetestesindexcl-1-6crr9m.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-hbpls Binding Scheduled Successfully assigned kuttl-test-amazing-walleye/test-es-index-cleaner-with-prefix-collector-8659b69c48-hbpls to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-hbpls AddedInterface Add eth0 [10.131.0.40/23] from ovn-kubernetes logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-hbpls.spec.containers{jaeger-collector} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-hbpls.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-hbpls.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-8659b69c48 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-8659b69c48-hbpls replicaset-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-8659b69c48 to 1 deployment-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw Binding Scheduled Successfully assigned kuttl-test-amazing-walleye/test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-c457b8b5f SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw replicaset-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:44 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-c457b8b5f to 1 deployment-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:45 +0000 UTC Normal Pod 
test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:52 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:52 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:52 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:52 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-c457b8b5f SuccessfulDelete Deleted pod: test-es-index-cleaner-with-prefix-query-c457b8b5f-rwhxw replicaset-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:52 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled down replica set test-es-index-cleaner-with-prefix-query-c457b8b5f to 0 from 1 deployment-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr Binding Scheduled Successfully assigned kuttl-test-amazing-walleye/test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:02:20 | 
es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-648cd7b668 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr replicaset-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:53 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-648cd7b668 to 1 deployment-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-648cd7b668-l4trr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:56 +0000 UTC Normal Pod 00-report-span-z5srz Binding Scheduled Successfully assigned kuttl-test-amazing-walleye/00-report-span-z5srz to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:56 +0000 UTC Normal Pod 00-report-span-z5srz AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:56 +0000 UTC Normal Pod 00-report-span-z5srz.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:56 +0000 UTC Normal Pod 00-report-span-z5srz.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:56 +0000 UTC Normal Pod 00-report-span-z5srz.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:02:20 | 
es-index-cleaner-autoprov | 2023-09-25 07:00:56 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-z5srz job-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:59 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:59 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:00:59 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:01:29 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:01:29 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-hbpls horizontal-pod-autoscaler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:01:29 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:01:35 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28260422 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-2826042x5npv job-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2826042x5npv Binding Scheduled Successfully assigned kuttl-test-amazing-walleye/test-es-index-cleaner-with-prefix-es-index-cleaner-2826042x5npv to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2826042x5npv AddedInterface Add eth0 [10.129.2.47/23] from ovn-kubernetes
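
[annotation] The HorizontalPodAutoscaler warnings above come in two flavors: the 07:00:59 batch ("no metrics returned from resource metrics API") is the usual transient noise while the metrics pipeline has no samples for the just-started collector pods, whereas the 07:01:29 FailedGetResourceMetric explicitly reports a missing memory request on the jaeger-collector container, which the HPA needs in order to compute utilization. A minimal sketch of the kind of CR change that would clear that second warning; the field path is from the jaegertracing.io/v1 CRD, and the resource values are assumptions, not the test's actual settings:

apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: test-es-index-cleaner-with-prefix
spec:
  collector:
    resources:
      requests:
        cpu: 100m       # assumed value; gives the HPA a basis for cpu utilization
        memory: 128Mi   # assumed value; clears "missing request for memory in container jaeger-collector"

logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:00 +0000 UTC Normal Pod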
test-es-index-cleaner-with-prefix-es-index-cleaner-2826042x5npv.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:8ac1b958ff16ea16f4d0c7132e3d369848a829d6655e0b2338a9bef93d54f02d" kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-28260422 cronjob-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:05 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2826042x5npv.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:8ac1b958ff16ea16f4d0c7132e3d369848a829d6655e0b2338a9bef93d54f02d" in 4.816531445s (4.816542065s including waiting) kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:05 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2826042x5npv.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:05 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2826042x5npv.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:08 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28260422 Completed Job completed job-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:08 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-28260422, status: Complete cronjob-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:16 +0000 UTC Normal Pod 00-check-indices-92v57 Binding Scheduled Successfully assigned kuttl-test-amazing-walleye/00-check-indices-92v57 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:16 +0000 UTC Normal Pod 00-check-indices-92v57 AddedInterface Add eth0 [10.129.2.48/23] from ovn-kubernetes logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:16 +0000 UTC Normal Pod 00-check-indices-92v57.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:16 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-92v57 job-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:17 +0000 UTC Normal Pod 00-check-indices-92v57.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:17 +0000 UTC Normal Pod 00-check-indices-92v57.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:02:20 | es-index-cleaner-autoprov | 2023-09-25 07:02:20 
+0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 07:02:20 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-amazing-walleye
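
[annotation] The es-index-cleaner-autoprov run that just finished exercises the operator-managed index cleaner: once one of the install steps enables it, the cronjob-controller creates test-es-index-cleaner-with-prefix-es-index-cleaner-28260422, that job prunes old indices, and the 00-check-indices job asserts they are gone. A minimal sketch of the CR fragment that drives this behavior; the prefix, retention, and schedule values are illustrative assumptions (the log only shows that a prefix is configured and that a job fires at 07:02:00):

apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: test-es-index-cleaner-with-prefix
spec:
  strategy: production
  storage:
    type: elasticsearch
    options:
      es:
        index-prefix: test       # assumed prefix; the test name implies one is set
    esIndexCleaner:
      enabled: true
      numberOfDays: 0            # assumed retention; indices older than this are deleted
      schedule: "*/1 * * * *"    # assumed; consistent with a job firing within the minute

=== CONT kuttl/harness/es-from-aio-to-production
logger.go:42: 07:02:26 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:02:26 | es-from-aio-to-production | Creating namespace: kuttl-test-present-kodiak logger.go:42: 07:02:26 | es-from-aio-to-production/0-install | starting test step 0-install logger.go:42: 07:02:26 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-present-kodiak/my-jaeger created logger.go:42: 07:02:36 | es-from-aio-to-production/0-install | test step completed 0-install logger.go:42: 07:02:36 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:02:36 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:02:38 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:02:44 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:02:45 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:02:45 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created logger.go:42: 07:02:45 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created logger.go:42: 07:02:56 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:02:56 | es-from-aio-to-production/3-install | starting test step 3-install logger.go:42: 07:02:56 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-present-kodiak/my-jaeger updated logger.go:42: 07:03:28 | es-from-aio-to-production/3-install | test step completed 3-install logger.go:42: 07:03:28 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test logger.go:42: 07:03:28 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:03:36 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:03:36 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f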
smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:03:37 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged logger.go:42: 07:03:37 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged logger.go:42: 07:03:37 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test logger.go:42: 07:03:37 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-present-kodiak: logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:30 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9 Binding Scheduled Successfully assigned kuttl-test-present-kodiak/my-jaeger-b6c699c5b-h9qx9 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:30 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9 AddedInterface Add eth0 [10.128.2.32/23] from ovn-kubernetes logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:30 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:30 +0000 UTC Normal ReplicaSet.apps my-jaeger-b6c699c5b SuccessfulCreate Created pod: my-jaeger-b6c699c5b-h9qx9 replicaset-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:30 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-b6c699c5b to 1 deployment-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:34 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" in 4.021257185s (4.021268836s including waiting) kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:34 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:35 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:35 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:35 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:35 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:40 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:40 +0000 UTC Normal Pod my-jaeger-b6c699c5b-h9qx9.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:40 +0000 UTC Normal ReplicaSet.apps my-jaeger-b6c699c5b 
SuccessfulDelete Deleted pod: my-jaeger-b6c699c5b-h9qx9 replicaset-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:40 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-b6c699c5b to 0 from 1 deployment-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:41 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw Binding Scheduled Successfully assigned kuttl-test-present-kodiak/my-jaeger-df5888688-c6rhw to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:41 +0000 UTC Normal ReplicaSet.apps my-jaeger-df5888688 SuccessfulCreate Created pod: my-jaeger-df5888688-c6rhw replicaset-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:41 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-df5888688 to 1 deployment-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:42 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:42 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:43 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" in 1.374270261s (1.374320441s including waiting) kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:43 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:43 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:43 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:43 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:43 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod check-span-qzzqv Binding Scheduled Successfully assigned kuttl-test-present-kodiak/check-span-qzzqv to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod check-span-qzzqv AddedInterface Add eth0 [10.129.2.49/23] from ovn-kubernetes logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod check-span-qzzqv.spec.containers{asserts-container} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod check-span-qzzqv.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod check-span-qzzqv.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-qzzqv job-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod report-span-txr4w Binding Scheduled Successfully assigned kuttl-test-present-kodiak/report-span-txr4w to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod report-span-txr4w AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod report-span-txr4w.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod report-span-txr4w.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Pod report-span-txr4w.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:45 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-txr4w job-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:56 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:58 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499cbc SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4 replicaset-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:58 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4 Binding Scheduled Successfully assigned kuttl-test-present-kodiak/elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:58 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499cbc to 1 deployment-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4 AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4.spec.containers{elasticsearch} Pulled Container image 
"registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:02:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:09 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:14 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpresentkodiakmyjaeger-1-7649499vs4z4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:18 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:25 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-f899r Binding Scheduled Successfully assigned kuttl-test-present-kodiak/my-jaeger-collector-558ccfc8dd-f899r to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-f899r replicaset-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:25 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:25 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:25 +0000 UTC Normal Pod my-jaeger-df5888688-c6rhw.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:25 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz Binding Scheduled Successfully assigned kuttl-test-present-kodiak/my-jaeger-query-c99746c8b-jknbz to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 
07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-c99746c8b SuccessfulCreate Created pod: my-jaeger-query-c99746c8b-jknbz replicaset-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:25 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-c99746c8b to 1 deployment-controller logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-f899r AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-f899r.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-f899r.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-f899r.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz.spec.containers{jaeger-agent} Created Created 
container jaeger-agent kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | 2023-09-25 07:03:26 +0000 UTC Normal Pod my-jaeger-query-c99746c8b-jknbz.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:03:37 | es-from-aio-to-production | Deleting namespace: kuttl-test-present-kodiak
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (870.35s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.14s)
        --- PASS: kuttl/harness/es-multiinstance (111.38s)
        --- PASS: kuttl/harness/es-streaming-autoprovisioned (182.70s)
        --- PASS: kuttl/harness/es-simple-prod (5.76s)
        --- PASS: kuttl/harness/es-rollover-autoprov (248.18s)
        --- PASS: kuttl/harness/es-increasing-replicas (104.26s)
        --- PASS: kuttl/harness/es-index-cleaner-autoprov (135.22s)
        --- PASS: kuttl/harness/es-from-aio-to-production (76.65s)
PASS
+ exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml time="2023-09-25T07:03:44Z" level=debug msg="Setting a new name for the test suites" time="2023-09-25T07:03:44Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-09-25T07:03:44Z" level=debug msg="normalizing test case names" time="2023-09-25T07:03:44Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts" time="2023-09-25T07:03:44Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance" time="2023-09-25T07:03:44Z" level=debug msg="elasticsearch/es-streaming-autoprovisioned -> elasticsearch_es_streaming_autoprovisioned" time="2023-09-25T07:03:44Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod" time="2023-09-25T07:03:44Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov" time="2023-09-25T07:03:44Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas" time="2023-09-25T07:03:44Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov" time="2023-09-25T07:03:44Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+--------------------------------------------+--------+
|                    NAME                    | RESULT |
+--------------------------------------------+--------+
| elasticsearch_artifacts                    | passed |
| elasticsearch_es_multiinstance             | passed |
| elasticsearch_es_streaming_autoprovisioned | passed |
| elasticsearch_es_simple_prod               | passed |
| elasticsearch_es_rollover_autoprov         | passed |
| elasticsearch_es_increasing_replicas       | passed |
| elasticsearch_es_index_cleaner_autoprov    | passed |
| elasticsearch_es_from_aio_to_production    | passed |
+--------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + '[' 0 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true + '[' 3 -ne 3 ']' + test_suite_name=examples + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'.
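A note on the gate that closed the elasticsearch suite above: after junitcli normalizes the kuttl report, the wrapper script counts 'failure message' occurrences in each junit artifact and only fails the run when the count passes a small threshold. A minimal sketch reconstructed from the '+' trace lines (the actual run-e2e-test-suite.sh body may differ; the threshold of 3 is read straight off the trace, and its intent as a flake tolerance is an assumption):

    count=0
    for file in $ARTIFACT_DIR/*; do
      # every failed junit test case serializes a 'failure message' attribute
      failures=$(grep -c 'failure message' "$file" || true)
      if [ "$failures" -gt 0 ]; then
        count=$((count + failures))
      fi
    done
    # tolerate a handful of failures before flagging the whole suite run
    if [ "$count" -gt 3 ]; then
      exit 1
    fi
    exit 0
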
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/examples.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-examples make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \ ./tests/e2e/examples/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 22m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 22m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
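The Strimzi podset decision traced above hinges on a version_le helper. Its visible behaviour: put the two versions on separate lines, order them with sort -V, and succeed when the first argument sorts lowest. A sketch reconstructed from the '+++' lines (the real function body in render.sh may differ):

    version_le() {
      # succeeds iff $1 <= $2 under version-number ordering
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }

In this run version_le 0.32.0 0.25.0 is false (0.25.0 sorts first), so KAFKA_USE_CUSTOM_PODSET=true is exported for Kafka 0.32.0.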
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-as-daemonset + cd examples-agent-as-daemonset + example_name=agent-as-daemonset + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-as-daemonset 01 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-as-daemonset.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-as-daemonset 02 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset + '[' examples-agent-as-daemonset '!=' _build ']' + cd .. 
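Each example render starts by pulling the Jaeger instance name and the effective deployment strategy out of the example YAML with yq, as traced above for agent-as-daemonset. A sketch of the two helpers as reconstructed from the trace (yq stands in for the vendored /tmp/jaeger-tests/bin/yq; the actual function bodies may differ):

    get_jaeger_name() {
      yq e '. | select(.kind == "Jaeger").metadata.name' "$1"
    }

    get_jaeger_strategy() {
      deployment_file="$1"
      strategy=$(yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
      if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
        echo "$strategy"
        return 0
      fi
      # fall back to the agent strategy: DaemonSet selects the DaemonSet
      # assert template, and no strategy at all means the default allInOne
      strategy=$(yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
      if [ "$strategy" = null ]; then
        echo allInOne
      else
        echo "$strategy"
      fi
    }
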
+ mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. + mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + '[' false = true ']' + start_test examples-auto-provision-kafka + '[' 1 -ne 1 ']' + test_name=examples-auto-provision-kafka + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-auto-provision-kafka' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-auto-provision-kafka\e[0m' Rendering files for test examples-auto-provision-kafka + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. 
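The smoke-test rendering traced repeatedly above always follows the same shape: export the query and collector endpoints for the instance, render the OpenShift smoke-test template and its assert file with gomplate, then unset everything. A sketch under the assumption that the insecure branch (never exercised in this OpenShift run) uses plain HTTP against the default query port:

    render_smoke_test() {
      jaeger="$1" is_secured="$2" test_step="$3"
      if [ "$is_secured" = true ]; then
        protocol=https:// query_port=:443
        template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
      else
        protocol=http:// query_port=:16686   # assumption: insecure defaults
        template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
      fi
      export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
      export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
      export JAEGER_NAME=$jaeger
      gomplate -f "$template" -o ./${test_step}-smoke-test.yaml
      gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./${test_step}-assert.yaml
      unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }
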
+ mkdir -p examples-auto-provision-kafka + cd examples-auto-provision-kafka + example_name=auto-provision-kafka + render_install_kafka_operator 01 + '[' 1 -ne 1 ']' + test_step=01 + '[' true '!=' true ']' + render_install_example auto-provision-kafka 02 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/auto-provision-kafka.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + JAEGER_NAME=auto-provision-kafka + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=streaming ++ '[' streaming = production ']' ++ '[' streaming = streaming ']' ++ echo streaming ++ return 0 + jaeger_strategy=streaming + '[' streaming = DaemonSet ']' + '[' streaming = allInOne ']' + '[' streaming = production ']' + '[' streaming = streaming ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./02-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./02-install.yaml + mv ./02-assert.yaml ./05-assert.yaml + render_assert_kafka true auto-provision-kafka 02 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provision-kafka + test_step=02 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./02-assert.yaml ++ expr 02 + 1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./3-assert.yaml ++ expr 02 + 2 + CLUSTER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./04-assert.yaml + 
render_smoke_test_example auto-provision-kafka 06 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=06 + deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + jaeger_name=auto-provision-kafka + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test auto-provision-kafka true 06 + '[' 3 -ne 3 ']' + jaeger=auto-provision-kafka + is_secured=true + test_step=06 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + export JAEGER_NAME=auto-provision-kafka + JAEGER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./06-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-auto-provision-kafka + '[' examples-auto-provision-kafka '!=' _build ']' + cd .. 
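Worth pulling out of the auto-provision-kafka trace above: whether the suite asks for a minimal single-replica Kafka is decided by probing the jaeger-operator pod for a KAFKA-PROVISIONING-MINIMAL env var across a fixed list of namespaces. A sketch reconstructed from the trace:

    is_kafka_minimal_enabled() {
      namespaces=(observability openshift-operators openshift-distributed-tracing)
      for i in "${namespaces[@]}"; do
        enabled=$(kubectl get pods -n "$i" -l name=jaeger-operator -o yaml \
          | yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value')
        if [ "$enabled" == true ]; then
          return 0
        fi
      done
      return 1
    }

Here the flag is found in openshift-distributed-tracing, so the ZooKeeper, Kafka and entity-operator assert templates are rendered with REPLICAS=1.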
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
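Every install step above repeats one normalization pattern: render the upstream example with gomplate, then sed the storage endpoints away from the default namespace so they resolve against the suite's own backends. A condensed sketch (it reuses the get_jaeger_name helper sketched earlier; the real render_install_example also derives the matching assert template):

    render_install_example() {
      example_name="$1" test_step="$2"
      install_file=./${test_step}-install.yaml
      gomplate -f /tmp/jaeger-tests/examples/${example_name}.yaml -o "$install_file"
      # upstream examples point at *.default.svc; retarget them to the
      # elasticsearch/cassandra services deployed for the test namespace
      sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' "$install_file"
      sed -i s~cassandra.default.svc~cassandra~gi "$install_file"
      export JAEGER_NAME
      JAEGER_NAME=$(get_jaeger_name "$install_file")
    }
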
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
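As with simple-prod above, every production or streaming example gets its storage spec trimmed before the test runs: the external-storage options are emptied and a single-node autoprovisioned Elasticsearch with a 2Gi memory limit is pinned in their place, presumably so the example fits the CI cluster (the motivation is an inference; the yq edits themselves are straight from the trace):

    yq e -i '.spec.storage.options={}' ./01-install.yaml
    yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml
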
+ start_test examples-simple-prod-with-volumes
+ '[' 1 -ne 1 ']'
+ test_name=examples-simple-prod-with-volumes
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-simple-prod-with-volumes'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m'
Rendering files for test examples-simple-prod-with-volumes
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod
+ '[' examples-simple-prod '!=' _build ']'
+ cd ..
+ mkdir -p examples-simple-prod-with-volumes
+ cd examples-simple-prod-with-volumes
+ example_name=simple-prod-with-volumes
+ render_install_example simple-prod-with-volumes 01
+ '[' 2 -ne 2 ']'
+ example_name=simple-prod-with-volumes
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=simple-prod
++ '[' -z simple-prod ']'
++ echo simple-prod
++ return 0
+ JAEGER_NAME=simple-prod
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=production
++ '[' production = production ']'
++ echo production
++ return 0
+ jaeger_strategy=production
+ '[' production = DaemonSet ']'
+ '[' production = allInOne ']'
+ '[' production = production ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ [[ true = true ]]
+ [[ true = true ]]
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml
+ render_smoke_test_example simple-prod-with-volumes 02
+ '[' 2 -ne 2 ']'
+ example_name=simple-prod-with-volumes
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml + start_test examples-simplest + '[' 1 -ne 1 ']' + test_name=examples-simplest + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simplest' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simplest\e[0m' Rendering files for test examples-simplest + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes + '[' examples-simple-prod-with-volumes '!=' _build ']' + cd .. + mkdir -p examples-simplest + cd examples-simplest + example_name=simplest + render_install_example simplest 00 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 01 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger + '[' 1 -ne 1 ']' + test_name=examples-with-badger + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m' Rendering files for test examples-with-badger + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest + '[' examples-simplest '!=' _build ']' + cd .. + mkdir -p examples-with-badger + cd examples-with-badger + example_name=with-badger + render_install_example with-badger 00 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + JAEGER_NAME=with-badger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger 01 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + jaeger_name=with-badger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + export JAEGER_NAME=with-badger + JAEGER_NAME=with-badger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger-and-volume + '[' 1 -ne 1 ']' + test_name=examples-with-badger-and-volume + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger-and-volume' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m' Rendering files for test examples-with-badger-and-volume + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger + '[' examples-with-badger '!=' _build ']' + cd .. + mkdir -p examples-with-badger-and-volume + cd examples-with-badger-and-volume + example_name=with-badger-and-volume + render_install_example with-badger-and-volume 00 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + JAEGER_NAME=with-badger-and-volume + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger-and-volume 01 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + jaeger_name=with-badger-and-volume + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger-and-volume true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger-and-volume + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + export JAEGER_NAME=with-badger-and-volume + JAEGER_NAME=with-badger-and-volume + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-cassandra + '[' 1 -ne 1 ']' + test_name=examples-with-cassandra + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-cassandra' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m' Rendering files for test examples-with-cassandra + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume + '[' examples-with-badger-and-volume '!=' _build ']' + cd .. 
+ mkdir -p examples-with-cassandra
+ cd examples-with-cassandra
+ example_name=with-cassandra
+ render_install_cassandra 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml
+ render_install_example with-cassandra 01
+ '[' 2 -ne 2 ']'
+ example_name=with-cassandra
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=with-cassandra
++ '[' -z with-cassandra ']'
++ echo with-cassandra
++ return 0
+ JAEGER_NAME=with-cassandra
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=allInOne
++ '[' allInOne = production ']'
++ '[' allInOne = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_smoke_test_example with-cassandra 02
+ '[' 2 -ne 2 ']'
+ example_name=with-cassandra
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + jaeger_name=with-cassandra + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-cassandra true 02 + '[' 3 -ne 3 ']' + jaeger=with-cassandra + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + export JAEGER_NAME=with-cassandra + JAEGER_NAME=with-cassandra + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-sampling + '[' 1 -ne 1 ']' + test_name=examples-with-sampling + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-sampling' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m' Rendering files for test examples-with-sampling + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra + '[' examples-with-cassandra '!=' _build ']' + cd .. + mkdir -p examples-with-sampling + cd examples-with-sampling + export example_name=with-sampling + example_name=with-sampling + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-sampling 01 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + JAEGER_NAME=with-sampling + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-sampling 02 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + jaeger_name=with-sampling + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-sampling true 02 + '[' 3 -ne 3 ']' + jaeger=with-sampling + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + export JAEGER_NAME=with-sampling + JAEGER_NAME=with-sampling + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + start_test examples-openshift-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-openshift-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-agent-as-daemonset\e[0m' Rendering files for test examples-openshift-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling + '[' examples-with-sampling '!=' _build ']' + cd .. 
+ '[' true = true ']'
+ start_test examples-openshift-agent-as-daemonset
+ '[' 1 -ne 1 ']'
+ test_name=examples-openshift-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-openshift-agent-as-daemonset'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-openshift-agent-as-daemonset\e[0m'
Rendering files for test examples-openshift-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling
+ '[' examples-with-sampling '!=' _build ']'
+ cd ..
+ mkdir -p examples-openshift-agent-as-daemonset
+ cd examples-openshift-agent-as-daemonset
+ prepare_daemonset 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ '[' true = true ']'
+ cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml
+ echo ---
+ cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml
+ JAEGER_NAME=agent-as-daemonset
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml
+ render_install_vertx 03
+ '[' 1 -ne 1 ']'
+ test_step=03
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./03-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].env=[{"name": "JAEGER_AGENT_HOST", "valueFrom": {"fieldRef": {"apiVersion": "v1", "fieldPath": "status.hostIP"}}}]' ./03-install.yaml
+ render_find_service agent-as-daemonset production order 00 04
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-daemonset
+ deployment_strategy=production
+ service_name=order
+ job_number=00
+ test_step=04
+ export JAEGER_NAME=agent-as-daemonset
+ JAEGER_NAME=agent-as-daemonset
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' production '!=' allInOne ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template -o ./04-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
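In the agent-as-daemonset block above, the Vert.x test workload cannot reach a sidecar agent; instead the trace patches the rendered deployment so the application discovers the node-local agent through the downward API, injecting the node's host IP as JAEGER_AGENT_HOST. The yq edit, verbatim from the trace:

# Point the example app at the per-node agent via the downward API
yq e -i '.spec.template.spec.containers[0].env=[{"name": "JAEGER_AGENT_HOST", "valueFrom": {"fieldRef": {"apiVersion": "v1", "fieldPath": "status.hostIP"}}}]' ./03-install.yaml

The render_find_service step that follows then asserts that spans from the order service actually arrive through that path.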
+ '[' true = true ']'
+ start_test examples-openshift-with-htpasswd
+ '[' 1 -ne 1 ']'
+ test_name=examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-openshift-with-htpasswd'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m'
Rendering files for test examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-agent-as-daemonset
+ '[' examples-openshift-agent-as-daemonset '!=' _build ']'
+ cd ..
+ mkdir -p examples-openshift-with-htpasswd
+ cd examples-openshift-with-htpasswd
+ export JAEGER_NAME=with-htpasswd
+ JAEGER_NAME=with-htpasswd
+ export JAEGER_USERNAME=awesomeuser
+ JAEGER_USERNAME=awesomeuser
+ export JAEGER_PASSWORD=awesomepassword
+ JAEGER_PASSWORD=awesomepassword
+ export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
+ JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ base64
+ SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg==
+ /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ INSECURE=true
+ JAEGER_USERNAME=
+ JAEGER_PASSWORD=
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml
+ JAEGER_USERNAME=wronguser
+ JAEGER_PASSWORD=wrongpassword
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml
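The htpasswd test wires a user/password pair through the OpenShift OAuth proxy: the secret value is the base64 of an htpasswd SHA entry, and the three assert-http-code steps rendered above expect 403 with no credentials, 403 with wrong credentials, and 200 with the right ones. A sketch of how the hash and secret could be produced (assumes the htpasswd tool from httpd-tools; note that the trailing newline from echo is included in the base64 value, matching the Cg== suffix in the trace):

JAEGER_USERNAME=awesomeuser
JAEGER_PASSWORD=awesomepassword
# -n print to stdout, -b take the password from the command line, -s SHA-1 hash
JAEGER_USER_PASSWORD_HASH=$(htpasswd -nbs "$JAEGER_USERNAME" "$JAEGER_PASSWORD")
SECRET=$(echo "$JAEGER_USER_PASSWORD_HASH" | base64)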
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running examples E2E tests'
Running examples E2E tests
+ cd tests/e2e/examples/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-2270117936
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 17 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/examples-agent-as-daemonset
=== PAUSE kuttl/harness/examples-agent-as-daemonset
=== RUN kuttl/harness/examples-agent-with-priority-class
=== PAUSE kuttl/harness/examples-agent-with-priority-class
=== RUN kuttl/harness/examples-all-in-one-with-options
=== PAUSE kuttl/harness/examples-all-in-one-with-options
=== RUN kuttl/harness/examples-auto-provision-kafka
=== PAUSE kuttl/harness/examples-auto-provision-kafka
=== RUN kuttl/harness/examples-business-application-injected-sidecar
=== PAUSE kuttl/harness/examples-business-application-injected-sidecar
=== RUN kuttl/harness/examples-collector-with-priority-class
=== PAUSE kuttl/harness/examples-collector-with-priority-class
=== RUN kuttl/harness/examples-openshift-agent-as-daemonset
=== PAUSE kuttl/harness/examples-openshift-agent-as-daemonset
=== RUN kuttl/harness/examples-openshift-with-htpasswd
=== PAUSE kuttl/harness/examples-openshift-with-htpasswd
=== RUN kuttl/harness/examples-service-types
=== PAUSE kuttl/harness/examples-service-types
=== RUN kuttl/harness/examples-simple-prod
=== PAUSE kuttl/harness/examples-simple-prod
=== RUN kuttl/harness/examples-simple-prod-with-volumes
=== PAUSE kuttl/harness/examples-simple-prod-with-volumes
=== RUN kuttl/harness/examples-simplest
=== PAUSE kuttl/harness/examples-simplest
=== RUN kuttl/harness/examples-with-badger
=== PAUSE kuttl/harness/examples-with-badger
=== RUN kuttl/harness/examples-with-badger-and-volume
=== PAUSE kuttl/harness/examples-with-badger-and-volume
=== RUN kuttl/harness/examples-with-cassandra
=== PAUSE kuttl/harness/examples-with-cassandra
=== RUN kuttl/harness/examples-with-sampling
=== PAUSE kuttl/harness/examples-with-sampling
=== CONT kuttl/harness/artifacts
logger.go:42: 07:04:22 | artifacts | Creating namespace: kuttl-test-better-anteater
logger.go:42: 07:04:22 | artifacts | artifacts events from ns kuttl-test-better-anteater:
logger.go:42: 07:04:22 | artifacts | Deleting namespace: kuttl-test-better-anteater
=== CONT kuttl/harness/examples-service-types
logger.go:42: 07:04:28 | examples-service-types | Creating namespace: kuttl-test-talented-roughy
logger.go:42: 07:04:28 | examples-service-types/0-install | starting test step 0-install
logger.go:42: 07:04:28 | examples-service-types/0-install | Jaeger:kuttl-test-talented-roughy/service-types created
logger.go:42: 07:04:34 | examples-service-types/0-install | test step completed 0-install
logger.go:42: 07:04:34 | examples-service-types/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:04:34 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null]
logger.go:42: 07:04:35 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:04:41 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:04:42 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:04:42 | examples-service-types/1-smoke-test | job.batch/report-span created
logger.go:42: 07:04:42 | examples-service-types/1-smoke-test | job.batch/check-span created
logger.go:42: 07:04:54 | examples-service-types/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:04:54 | examples-service-types/2- | starting test step 2-
logger.go:42: 07:04:54 | examples-service-types/2- | test step completed 2-
logger.go:42: 07:04:54 | examples-service-types | examples-service-types events from ns kuttl-test-talented-roughy:
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:31 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7 Binding Scheduled Successfully assigned kuttl-test-talented-roughy/service-types-648f9cd654-x9cb7 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:31 +0000 UTC Normal ReplicaSet.apps service-types-648f9cd654 SuccessfulCreate Created pod: service-types-648f9cd654-x9cb7 replicaset-controller
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:31 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:31 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-648f9cd654 to 1 deployment-controller
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:32 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7 AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:32 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:32 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:32 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:32 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:32 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:32 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:35 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:35 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:38 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:38 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:38 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-648f9cd654 to 0 from 1 deployment-controller
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:39 +0000 UTC Normal Pod service-types-648f9cd654-x9cb7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:39 +0000 UTC Normal ReplicaSet.apps service-types-648f9cd654 SuccessfulDelete Deleted pod: service-types-648f9cd654-x9cb7 replicaset-controller
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:39 +0000 UTC Normal Pod service-types-bff7ff959-kmtp7 Binding Scheduled Successfully assigned kuttl-test-talented-roughy/service-types-bff7ff959-kmtp7 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:39 +0000 UTC Normal ReplicaSet.apps service-types-bff7ff959 SuccessfulCreate Created pod: service-types-bff7ff959-kmtp7 replicaset-controller
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:39 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-bff7ff959 to 1 deployment-controller
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:40 +0000 UTC Normal Pod service-types-bff7ff959-kmtp7 AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:40 +0000 UTC Normal Pod service-types-bff7ff959-kmtp7.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:40 +0000 UTC Normal Pod service-types-bff7ff959-kmtp7.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:40 +0000 UTC Normal Pod service-types-bff7ff959-kmtp7.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:40 +0000 UTC Normal Pod service-types-bff7ff959-kmtp7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:40 +0000 UTC Normal Pod service-types-bff7ff959-kmtp7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:40 +0000 UTC Normal Pod service-types-bff7ff959-kmtp7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:42 +0000 UTC Normal Pod check-span-zwl98 Binding Scheduled Successfully assigned kuttl-test-talented-roughy/check-span-zwl98 to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:42 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-zwl98 job-controller
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:42 +0000 UTC Normal Pod report-span-jx524 Binding Scheduled Successfully assigned kuttl-test-talented-roughy/report-span-jx524 to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:42 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-jx524 job-controller
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:43 +0000 UTC Normal Pod check-span-zwl98 AddedInterface Add eth0 [10.128.2.37/23] from ovn-kubernetes
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:43 +0000 UTC Normal Pod check-span-zwl98.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:43 +0000 UTC Normal Pod check-span-zwl98.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:43 +0000 UTC Normal Pod check-span-zwl98.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:43 +0000 UTC Normal Pod report-span-jx524 AddedInterface Add eth0 [10.128.2.36/23] from ovn-kubernetes
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:43 +0000 UTC Normal Pod report-span-jx524.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:43 +0000 UTC Normal Pod report-span-jx524.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:43 +0000 UTC Normal Pod report-span-jx524.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:04:54 | examples-service-types | 2023-09-25 07:04:54 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:04:54 | examples-service-types | Deleting namespace: kuttl-test-talented-roughy
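The smoke test itself is two Kubernetes Jobs rendered from smoke-test.yaml.template: judging by the endpoints passed to gomplate, report-span pushes spans toward the collector endpoint and check-span polls the query API until they are visible; the rendered kuttl assert then waits for both Jobs to complete. Outside kuttl, roughly the same wait could be done by hand (a sketch; job names taken from the log):

kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
kubectl wait --for=condition=complete job/report-span -n "$NAMESPACE" --timeout=420s
kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=420s

The 420-second timeout mirrors the per-step timeout the harness reports above.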
=== CONT kuttl/harness/examples-with-sampling
logger.go:42: 07:05:27 | examples-with-sampling | Creating namespace: kuttl-test-destined-bat
logger.go:42: 07:05:27 | examples-with-sampling/0-install | starting test step 0-install
logger.go:42: 07:05:27 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE]
logger.go:42: 07:05:27 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 07:05:27 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-destined-bat
logger.go:42: 07:05:27 | examples-with-sampling/0-install | kubectl create namespace kuttl-test-destined-bat 2>&1 | grep -v "already exists" || true
logger.go:42: 07:05:27 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-destined-bat 2>&1 | grep -v "already exists" || true
logger.go:42: 07:05:28 | examples-with-sampling/0-install | service/cassandra created
logger.go:42: 07:05:28 | examples-with-sampling/0-install | statefulset.apps/cassandra created
logger.go:42: 07:05:28 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 07:05:38 | examples-with-sampling/0-install | test step completed 0-install
logger.go:42: 07:05:38 | examples-with-sampling/1-install | starting test step 1-install
logger.go:42: 07:05:38 | examples-with-sampling/1-install | Jaeger:kuttl-test-destined-bat/with-sampling created
logger.go:42: 07:05:44 | examples-with-sampling/1-install | test step completed 1-install
logger.go:42: 07:05:44 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:05:44 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null]
logger.go:42: 07:05:45 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:05:52 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:05:52 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:05:52 | examples-with-sampling/2-smoke-test | job.batch/report-span created
logger.go:42: 07:05:52 | examples-with-sampling/2-smoke-test | job.batch/check-span created
logger.go:42: 07:06:03 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 07:06:03 | examples-with-sampling/3- | starting test step 3-
logger.go:42: 07:06:03 | examples-with-sampling/3- | test step completed 3-
logger.go:42: 07:06:04 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-destined-bat:
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:28 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-destined-bat/cassandra-0 to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:28 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:28 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:28 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:32 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 4.080000109s (4.080014149s including waiting) kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:33 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:33 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:33 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-destined-bat/cassandra-1 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:33 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:33 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:33 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:37 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 3.967028181s (3.967042931s including waiting) kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:37 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:37 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:41 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t Binding Scheduled Successfully assigned kuttl-test-destined-bat/with-sampling-855bff7687-x5c6t to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:41 +0000 UTC Normal ReplicaSet.apps with-sampling-855bff7687 SuccessfulCreate Created pod: with-sampling-855bff7687-x5c6t replicaset-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:41 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-855bff7687 to 1 deployment-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:42 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:42 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:42 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:42 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:42 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:42 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:42 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:49 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:49 +0000 UTC Normal Pod with-sampling-855bff7687-x5c6t.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:49 +0000 UTC Normal ReplicaSet.apps with-sampling-855bff7687 SuccessfulDelete Deleted pod: with-sampling-855bff7687-x5c6t replicaset-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:49 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set with-sampling-855bff7687 to 0 from 1 deployment-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal Pod with-sampling-7874bd45b-8g2mj Binding Scheduled Successfully assigned kuttl-test-destined-bat/with-sampling-7874bd45b-8g2mj to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal Pod with-sampling-7874bd45b-8g2mj AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal Pod with-sampling-7874bd45b-8g2mj.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal Pod with-sampling-7874bd45b-8g2mj.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal Pod with-sampling-7874bd45b-8g2mj.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal Pod with-sampling-7874bd45b-8g2mj.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal Pod with-sampling-7874bd45b-8g2mj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal Pod with-sampling-7874bd45b-8g2mj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal ReplicaSet.apps with-sampling-7874bd45b SuccessfulCreate Created pod: with-sampling-7874bd45b-8g2mj replicaset-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:50 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-7874bd45b to 1 deployment-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:52 +0000 UTC Normal Pod check-span-pv7fw Binding Scheduled Successfully assigned kuttl-test-destined-bat/check-span-pv7fw to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:52 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-pv7fw job-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:52 +0000 UTC Normal Pod report-span-ds7lv Binding Scheduled Successfully assigned kuttl-test-destined-bat/report-span-ds7lv to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:52 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-ds7lv job-controller
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:53 +0000 UTC Normal Pod check-span-pv7fw AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:53 +0000 UTC Normal Pod check-span-pv7fw.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:53 +0000 UTC Normal Pod check-span-pv7fw.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:53 +0000 UTC Normal Pod check-span-pv7fw.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:53 +0000 UTC Normal Pod report-span-ds7lv AddedInterface Add eth0 [10.128.2.39/23] from ovn-kubernetes
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:53 +0000 UTC Normal Pod report-span-ds7lv.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:53 +0000 UTC Normal Pod report-span-ds7lv.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:05:53 +0000 UTC Normal Pod report-span-ds7lv.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:06:04 | examples-with-sampling | 2023-09-25 07:06:03 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:06:04 | examples-with-sampling | Deleting namespace: kuttl-test-destined-bat
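The make cassandra step that several examples run first is idempotent by construction: as the echoed recipe shows, both kubectl create calls filter out "already exists" errors and force a zero exit status, so reruns against a shared namespace are harmless. The two commands as echoed in the log, in plain shell form (STORAGE_NAMESPACE is passed in by kuttl as the test namespace):

STORAGE_NAMESPACE=$NAMESPACE
kubectl create namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true
kubectl create -f ./tests/cassandra.yml --namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true

Note the trade-off: the trailing || true also masks genuine failures of the create, so the subsequent assert on the cassandra StatefulSet is what actually validates the install.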
STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 07:07:22 | examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:07:22 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-welcome-squid logger.go:42: 07:07:22 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-welcome-squid 2>&1 | grep -v "already exists" || true logger.go:42: 07:07:22 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-welcome-squid 2>&1 | grep -v "already exists" || true logger.go:42: 07:07:23 | examples-with-cassandra/0-install | service/cassandra created logger.go:42: 07:07:23 | examples-with-cassandra/0-install | statefulset.apps/cassandra created logger.go:42: 07:07:23 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:07:25 | examples-with-cassandra/0-install | test step completed 0-install logger.go:42: 07:07:25 | examples-with-cassandra/1-install | starting test step 1-install logger.go:42: 07:07:25 | examples-with-cassandra/1-install | Jaeger:kuttl-test-welcome-squid/with-cassandra created logger.go:42: 07:07:45 | examples-with-cassandra/1-install | test step completed 1-install logger.go:42: 07:07:45 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:07:45 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null] logger.go:42: 07:07:46 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
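For context before the smoke test continues: the Jaeger instance exercised by this test was created in step 1-install as Jaeger:kuttl-test-welcome-squid/with-cassandra, backed by the Cassandra StatefulSet from step 0-install. The rendered manifest itself is not captured in this log, so the following is only a minimal sketch of what such a custom resource can look like; the keyspace and datacenter values are illustrative assumptions, while the service name cassandra matches the service/cassandra created above.

apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: with-cassandra            # matches the deployment/pod names in the events below
spec:
  strategy: allInOne              # single all-in-one pod, consistent with the with-cassandra-* events
  storage:
    type: cassandra
    options:
      cassandra:
        servers: cassandra        # the service created by tests/cassandra.yml in step 0-install
        keyspace: jaeger_v1_datacenter1   # assumed; the actual keyspace is not in this log
    cassandraCreateSchema:
      datacenter: datacenter1     # assumed; drives the schema job visible in the events
      mode: prod                  # assumed

The with-cassandra-cassandra-schema-job events recorded later in this test (job created, image jaegertracing/jaeger-cassandra-schema:1.47.0 pulled, job completed at 07:07:40) are consistent with the operator running such a schema job to completion before scaling up the all-in-one deployment.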
logger.go:42: 07:07:53 | examples-with-cassandra/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:07:53 | examples-with-cassandra/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:07:53 | examples-with-cassandra/2-smoke-test | job.batch/report-span created logger.go:42: 07:07:53 | examples-with-cassandra/2-smoke-test | job.batch/check-span created logger.go:42: 07:08:05 | examples-with-cassandra/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:08:05 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-welcome-squid: logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:23 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-welcome-squid/cassandra-0 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:23 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:23 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:23 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:24 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-welcome-squid/cassandra-1 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:24 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.49/23] from ovn-kubernetes logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:24 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:24 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:25 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:25 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:28 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-m69xb Binding Scheduled Successfully assigned 
kuttl-test-welcome-squid/with-cassandra-cassandra-schema-job-m69xb to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:28 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-m69xb job-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:29 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-m69xb AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:29 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-m69xb.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.47.0" kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:34 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-m69xb.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.47.0" in 4.674868357s (4.674880427s including waiting) kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:34 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-m69xb.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:34 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-m69xb.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:40 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj Binding Scheduled Successfully assigned kuttl-test-welcome-squid/with-cassandra-6846c986dd-4dfhj to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:40 +0000 UTC Normal ReplicaSet.apps with-cassandra-6846c986dd SuccessfulCreate Created pod: with-cassandra-6846c986dd-4dfhj replicaset-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:40 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:40 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-6846c986dd to 1 deployment-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:41 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:41 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:41 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:41 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:41 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj.spec.containers{oauth-proxy} Pulled 
Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:41 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:41 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:48 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:48 +0000 UTC Normal Pod with-cassandra-6846c986dd-4dfhj.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:48 +0000 UTC Normal ReplicaSet.apps with-cassandra-6846c986dd SuccessfulDelete Deleted pod: with-cassandra-6846c986dd-4dfhj replicaset-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:48 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-6846c986dd to 0 from 1 deployment-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:49 +0000 UTC Normal Pod with-cassandra-7846d5dbd6-jk6rk Binding Scheduled Successfully assigned kuttl-test-welcome-squid/with-cassandra-7846d5dbd6-jk6rk to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:49 +0000 UTC Normal ReplicaSet.apps with-cassandra-7846d5dbd6 SuccessfulCreate Created pod: with-cassandra-7846d5dbd6-jk6rk replicaset-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:49 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-7846d5dbd6 to 1 deployment-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:50 +0000 UTC Normal Pod with-cassandra-7846d5dbd6-jk6rk AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:50 +0000 UTC Normal Pod with-cassandra-7846d5dbd6-jk6rk.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:50 +0000 UTC Normal Pod with-cassandra-7846d5dbd6-jk6rk.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:50 +0000 UTC Normal Pod with-cassandra-7846d5dbd6-jk6rk.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:50 +0000 UTC Normal Pod with-cassandra-7846d5dbd6-jk6rk.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:50 +0000 UTC Normal Pod with-cassandra-7846d5dbd6-jk6rk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:08:05 | 
examples-with-cassandra | 2023-09-25 07:07:50 +0000 UTC Normal Pod with-cassandra-7846d5dbd6-jk6rk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:53 +0000 UTC Normal Pod check-span-gwk6p Binding Scheduled Successfully assigned kuttl-test-welcome-squid/check-span-gwk6p to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:53 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-gwk6p job-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:53 +0000 UTC Normal Pod report-span-j8x2h Binding Scheduled Successfully assigned kuttl-test-welcome-squid/report-span-j8x2h to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:53 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-j8x2h job-controller logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:54 +0000 UTC Normal Pod check-span-gwk6p AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:54 +0000 UTC Normal Pod check-span-gwk6p.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:54 +0000 UTC Normal Pod check-span-gwk6p.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:54 +0000 UTC Normal Pod check-span-gwk6p.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:54 +0000 UTC Normal Pod report-span-j8x2h AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:54 +0000 UTC Normal Pod report-span-j8x2h.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:54 +0000 UTC Normal Pod report-span-j8x2h.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:07:54 +0000 UTC Normal Pod report-span-j8x2h.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:08:05 | examples-with-cassandra | 2023-09-25 07:08:05 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:08:05 | examples-with-cassandra | Deleting namespace: kuttl-test-welcome-squid === CONT kuttl/harness/examples-with-badger-and-volume logger.go:42: 07:08:17 | examples-with-badger-and-volume | Creating namespace: kuttl-test-new-mullet logger.go:42: 07:08:17 | examples-with-badger-and-volume/0-install | starting test step 0-install logger.go:42: 07:08:17 | examples-with-badger-and-volume/0-install | Jaeger:kuttl-test-new-mullet/with-badger-and-volume created logger.go:42: 07:08:24 | examples-with-badger-and-volume/0-install | test step completed 0-install logger.go:42: 07:08:24 | 
examples-with-badger-and-volume/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:08:24 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger-and-volume /dev/null] logger.go:42: 07:08:25 | examples-with-badger-and-volume/1-smoke-test | Warning: resource jaegers/with-badger-and-volume is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:08:32 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:08:32 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:08:32 | examples-with-badger-and-volume/1-smoke-test | job.batch/report-span created logger.go:42: 07:08:32 | examples-with-badger-and-volume/1-smoke-test | job.batch/check-span created logger.go:42: 07:08:43 | examples-with-badger-and-volume/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:08:43 | examples-with-badger-and-volume | examples-with-badger-and-volume events from ns kuttl-test-new-mullet: logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:21 +0000 UTC Normal Pod with-badger-and-volume-7cf4776958-ptcvz Binding Scheduled Successfully assigned kuttl-test-new-mullet/with-badger-and-volume-7cf4776958-ptcvz to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:21 +0000 UTC Warning Pod with-badger-and-volume-7cf4776958-ptcvz FailedMount MountVolume.SetUp failed for volume "with-badger-and-volume-ui-oauth-proxy-tls" : secret "with-badger-and-volume-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:21 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-7cf4776958 SuccessfulCreate Created pod: with-badger-and-volume-7cf4776958-ptcvz replicaset-controller logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:21 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-7cf4776958 to 1 deployment-controller logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:22 +0000 UTC Normal Pod with-badger-and-volume-7cf4776958-ptcvz AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:22 +0000 UTC Normal Pod with-badger-and-volume-7cf4776958-ptcvz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:22 +0000 UTC Normal Pod 
with-badger-and-volume-7cf4776958-ptcvz.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:22 +0000 UTC Normal Pod with-badger-and-volume-7cf4776958-ptcvz.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:22 +0000 UTC Normal Pod with-badger-and-volume-7cf4776958-ptcvz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:22 +0000 UTC Normal Pod with-badger-and-volume-7cf4776958-ptcvz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:22 +0000 UTC Normal Pod with-badger-and-volume-7cf4776958-ptcvz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:27 +0000 UTC Normal Pod with-badger-and-volume-59bb444879-nqlwm Binding Scheduled Successfully assigned kuttl-test-new-mullet/with-badger-and-volume-59bb444879-nqlwm to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:27 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-59bb444879 SuccessfulCreate Created pod: with-badger-and-volume-59bb444879-nqlwm replicaset-controller logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:27 +0000 UTC Normal Pod with-badger-and-volume-7cf4776958-ptcvz.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:27 +0000 UTC Normal Pod with-badger-and-volume-7cf4776958-ptcvz.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:27 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-7cf4776958 SuccessfulDelete Deleted pod: with-badger-and-volume-7cf4776958-ptcvz replicaset-controller logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:27 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled down replica set with-badger-and-volume-7cf4776958 to 0 from 1 deployment-controller logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:27 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-59bb444879 to 1 deployment-controller logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:28 +0000 UTC Normal Pod with-badger-and-volume-59bb444879-nqlwm AddedInterface Add eth0 [10.131.0.51/23] from ovn-kubernetes logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:28 +0000 UTC Normal Pod with-badger-and-volume-59bb444879-nqlwm.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:28 +0000 UTC Normal Pod with-badger-and-volume-59bb444879-nqlwm.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:08:43 | 
examples-with-badger-and-volume | 2023-09-25 07:08:28 +0000 UTC Normal Pod with-badger-and-volume-59bb444879-nqlwm.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:28 +0000 UTC Normal Pod with-badger-and-volume-59bb444879-nqlwm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:28 +0000 UTC Normal Pod with-badger-and-volume-59bb444879-nqlwm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:28 +0000 UTC Normal Pod with-badger-and-volume-59bb444879-nqlwm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:32 +0000 UTC Normal Pod check-span-xtp9g Binding Scheduled Successfully assigned kuttl-test-new-mullet/check-span-xtp9g to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:32 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-xtp9g job-controller logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:32 +0000 UTC Normal Pod report-span-tdpjv Binding Scheduled Successfully assigned kuttl-test-new-mullet/report-span-tdpjv to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:32 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-tdpjv job-controller logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:33 +0000 UTC Normal Pod check-span-xtp9g AddedInterface Add eth0 [10.128.2.46/23] from ovn-kubernetes logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:33 +0000 UTC Normal Pod check-span-xtp9g.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:33 +0000 UTC Normal Pod check-span-xtp9g.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:33 +0000 UTC Normal Pod check-span-xtp9g.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:33 +0000 UTC Normal Pod report-span-tdpjv AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:33 +0000 UTC Normal Pod report-span-tdpjv.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:33 +0000 UTC Normal Pod report-span-tdpjv.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:33 +0000 UTC Normal Pod 
report-span-tdpjv.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:08:43 | examples-with-badger-and-volume | 2023-09-25 07:08:43 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:08:43 | examples-with-badger-and-volume | Deleting namespace: kuttl-test-new-mullet === CONT kuttl/harness/examples-with-badger logger.go:42: 07:08:55 | examples-with-badger | Creating namespace: kuttl-test-unique-grizzly logger.go:42: 07:08:55 | examples-with-badger/0-install | starting test step 0-install logger.go:42: 07:08:55 | examples-with-badger/0-install | Jaeger:kuttl-test-unique-grizzly/with-badger created logger.go:42: 07:09:01 | examples-with-badger/0-install | test step completed 0-install logger.go:42: 07:09:01 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:09:01 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null] logger.go:42: 07:09:03 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:09:09 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:09:09 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:09:10 | examples-with-badger/1-smoke-test | job.batch/report-span created logger.go:42: 07:09:10 | examples-with-badger/1-smoke-test | job.batch/check-span created logger.go:42: 07:09:22 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:09:22 | examples-with-badger | examples-with-badger events from ns kuttl-test-unique-grizzly: logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:08:59 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg Binding Scheduled Successfully assigned kuttl-test-unique-grizzly/with-badger-7dbdb6749b-5p5tg to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:08:59 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg AddedInterface Add eth0 [10.131.0.52/23] from ovn-kubernetes logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:08:59 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:08:59 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:08:59 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg.spec.containers{jaeger} Started 
Started container jaeger kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:08:59 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:08:59 +0000 UTC Normal ReplicaSet.apps with-badger-7dbdb6749b SuccessfulCreate Created pod: with-badger-7dbdb6749b-5p5tg replicaset-controller logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:08:59 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-7dbdb6749b to 1 deployment-controller logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:00 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:00 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:06 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:06 +0000 UTC Normal Pod with-badger-7dbdb6749b-5p5tg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:06 +0000 UTC Normal ReplicaSet.apps with-badger-7dbdb6749b SuccessfulDelete Deleted pod: with-badger-7dbdb6749b-5p5tg replicaset-controller logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:06 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-7dbdb6749b to 0 from 1 deployment-controller logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:07 +0000 UTC Normal Pod with-badger-645988f94b-cdzvz Binding Scheduled Successfully assigned kuttl-test-unique-grizzly/with-badger-645988f94b-cdzvz to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:07 +0000 UTC Normal Pod with-badger-645988f94b-cdzvz AddedInterface Add eth0 [10.131.0.53/23] from ovn-kubernetes logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:07 +0000 UTC Normal Pod with-badger-645988f94b-cdzvz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:07 +0000 UTC Normal ReplicaSet.apps with-badger-645988f94b SuccessfulCreate Created pod: with-badger-645988f94b-cdzvz replicaset-controller logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:07 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-645988f94b to 1 deployment-controller logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:08 +0000 UTC Normal Pod with-badger-645988f94b-cdzvz.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:08 +0000 UTC Normal Pod with-badger-645988f94b-cdzvz.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:08 
+0000 UTC Normal Pod with-badger-645988f94b-cdzvz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:08 +0000 UTC Normal Pod with-badger-645988f94b-cdzvz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:08 +0000 UTC Normal Pod with-badger-645988f94b-cdzvz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:10 +0000 UTC Normal Pod check-span-bqkqs Binding Scheduled Successfully assigned kuttl-test-unique-grizzly/check-span-bqkqs to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:10 +0000 UTC Normal Pod check-span-bqkqs AddedInterface Add eth0 [10.128.2.48/23] from ovn-kubernetes logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:10 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-bqkqs job-controller logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:10 +0000 UTC Normal Pod report-span-f9dgf Binding Scheduled Successfully assigned kuttl-test-unique-grizzly/report-span-f9dgf to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:10 +0000 UTC Normal Pod report-span-f9dgf AddedInterface Add eth0 [10.128.2.47/23] from ovn-kubernetes logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:10 +0000 UTC Normal Pod report-span-f9dgf.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:10 +0000 UTC Normal Pod report-span-f9dgf.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:10 +0000 UTC Normal Pod report-span-f9dgf.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:10 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-f9dgf job-controller logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:11 +0000 UTC Normal Pod check-span-bqkqs.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:11 +0000 UTC Normal Pod check-span-bqkqs.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:11 +0000 UTC Normal Pod check-span-bqkqs.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:09:22 | examples-with-badger | 2023-09-25 07:09:22 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:09:22 | examples-with-badger | Deleting namespace: kuttl-test-unique-grizzly === CONT kuttl/harness/examples-simplest logger.go:42: 07:09:28 | examples-simplest | Creating namespace: 
kuttl-test-social-ox logger.go:42: 07:09:28 | examples-simplest/0-install | starting test step 0-install logger.go:42: 07:09:28 | examples-simplest/0-install | Jaeger:kuttl-test-social-ox/simplest created logger.go:42: 07:09:34 | examples-simplest/0-install | test step completed 0-install logger.go:42: 07:09:34 | examples-simplest/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:09:34 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null] logger.go:42: 07:09:35 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:09:41 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:09:42 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:09:42 | examples-simplest/1-smoke-test | job.batch/report-span created logger.go:42: 07:09:42 | examples-simplest/1-smoke-test | job.batch/check-span created logger.go:42: 07:09:53 | examples-simplest/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:09:53 | examples-simplest | examples-simplest events from ns kuttl-test-social-ox: logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:31 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-576d665f74 to 1 deployment-controller logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:32 +0000 UTC Normal Pod simplest-576d665f74-ldrnz Binding Scheduled Successfully assigned kuttl-test-social-ox/simplest-576d665f74-ldrnz to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:32 +0000 UTC Normal Pod simplest-576d665f74-ldrnz AddedInterface Add eth0 [10.128.2.49/23] from ovn-kubernetes logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:32 +0000 UTC Normal Pod simplest-576d665f74-ldrnz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:32 +0000 UTC Normal Pod simplest-576d665f74-ldrnz.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:32 +0000 UTC Normal Pod simplest-576d665f74-ldrnz.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:32 +0000 UTC Normal Pod simplest-576d665f74-ldrnz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:09:53 | 
examples-simplest | 2023-09-25 07:09:32 +0000 UTC Normal Pod simplest-576d665f74-ldrnz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:32 +0000 UTC Normal Pod simplest-576d665f74-ldrnz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:32 +0000 UTC Normal ReplicaSet.apps simplest-576d665f74 SuccessfulCreate Created pod: simplest-576d665f74-ldrnz replicaset-controller logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:36 +0000 UTC Normal Pod simplest-576d665f74-ldrnz.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:36 +0000 UTC Normal Pod simplest-576d665f74-ldrnz.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:36 +0000 UTC Normal ReplicaSet.apps simplest-576d665f74 SuccessfulDelete Deleted pod: simplest-576d665f74-ldrnz replicaset-controller logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:36 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-576d665f74 to 0 from 1 deployment-controller logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:37 +0000 UTC Normal Pod simplest-6d45745c5d-b89sc Binding Scheduled Successfully assigned kuttl-test-social-ox/simplest-6d45745c5d-b89sc to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:37 +0000 UTC Normal ReplicaSet.apps simplest-6d45745c5d SuccessfulCreate Created pod: simplest-6d45745c5d-b89sc replicaset-controller logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:37 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-6d45745c5d to 1 deployment-controller logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:38 +0000 UTC Normal Pod simplest-6d45745c5d-b89sc AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:38 +0000 UTC Normal Pod simplest-6d45745c5d-b89sc.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:38 +0000 UTC Normal Pod simplest-6d45745c5d-b89sc.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:38 +0000 UTC Normal Pod simplest-6d45745c5d-b89sc.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:38 +0000 UTC Normal Pod simplest-6d45745c5d-b89sc.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:38 +0000 UTC Normal Pod simplest-6d45745c5d-b89sc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:38 +0000 UTC Normal Pod simplest-6d45745c5d-b89sc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:42 
+0000 UTC Normal Pod check-span-t5274 Binding Scheduled Successfully assigned kuttl-test-social-ox/check-span-t5274 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:42 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-t5274 job-controller logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:42 +0000 UTC Normal Pod report-span-xcfnj Binding Scheduled Successfully assigned kuttl-test-social-ox/report-span-xcfnj to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:42 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-xcfnj job-controller logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:43 +0000 UTC Normal Pod check-span-t5274 AddedInterface Add eth0 [10.131.0.54/23] from ovn-kubernetes logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:43 +0000 UTC Normal Pod check-span-t5274.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:43 +0000 UTC Normal Pod check-span-t5274.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:43 +0000 UTC Normal Pod check-span-t5274.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:43 +0000 UTC Normal Pod report-span-xcfnj AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:43 +0000 UTC Normal Pod report-span-xcfnj.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:43 +0000 UTC Normal Pod report-span-xcfnj.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:43 +0000 UTC Normal Pod report-span-xcfnj.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:09:53 | examples-simplest | 2023-09-25 07:09:53 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:09:53 | examples-simplest | Deleting namespace: kuttl-test-social-ox === CONT kuttl/harness/examples-simple-prod-with-volumes logger.go:42: 07:10:05 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:10:05 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-polite-collie logger.go:42: 07:10:05 | examples-simple-prod-with-volumes/1-install | starting test step 1-install logger.go:42: 07:10:05 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-polite-collie/simple-prod created logger.go:42: 07:10:42 | examples-simple-prod-with-volumes/1-install | test step completed 1-install logger.go:42: 07:10:42 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:10:42 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c 
SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 07:10:44 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:10:50 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:10:50 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:10:51 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created logger.go:42: 07:10:51 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created logger.go:42: 07:11:02 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:11:02 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume logger.go:42: 07:11:02 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data] logger.go:42: 07:11:02 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-polite-collie: logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk Binding Scheduled Successfully assigned kuttl-test-polite-collie/elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:12 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:12 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bbb885 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk replicaset-controller logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:12 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bbb885 to 1 deployment-controller logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk AddedInterface Add eth0 
[10.129.2.53/23] from ovn-kubernetes
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:23 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:28 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpolitecolliesimpleprod-1-5558bb87skk.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-cm5ps Binding Scheduled Successfully assigned kuttl-test-polite-collie/simple-prod-collector-6978c9cd74-cm5ps to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Warning Pod simple-prod-collector-6978c9cd74-cm5ps FailedMount MountVolume.SetUp failed for volume "simple-prod-collector-tls-config-volume" : secret "simple-prod-collector-headless-tls" not found kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-6978c9cd74 SuccessfulCreate Created pod: simple-prod-collector-6978c9cd74-cm5ps replicaset-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-6978c9cd74 to 1 deployment-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt Binding Scheduled Successfully assigned kuttl-test-polite-collie/simple-prod-query-5c56c7454c-pwxqt to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt AddedInterface Add eth0 [10.131.0.56/23] from ovn-kubernetes
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5c56c7454c SuccessfulCreate Created pod: simple-prod-query-5c56c7454c-pwxqt replicaset-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:39 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-5c56c7454c to 1 deployment-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:40 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-cm5ps AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:40 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-cm5ps.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:40 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-cm5ps.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:40 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-cm5ps.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:40 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:40 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:45 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-5c56c7454c to 0 from 1 deployment-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:46 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:46 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:46 +0000 UTC Normal Pod simple-prod-query-5c56c7454c-pwxqt.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:46 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5c56c7454c SuccessfulDelete Deleted pod: simple-prod-query-5c56c7454c-pwxqt replicaset-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:46 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl Binding Scheduled Successfully assigned kuttl-test-polite-collie/simple-prod-query-674c66d797-l95tl to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:46 +0000 UTC Normal ReplicaSet.apps simple-prod-query-674c66d797 SuccessfulCreate Created pod: simple-prod-query-674c66d797-l95tl replicaset-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:46 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-674c66d797 to 1 deployment-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl AddedInterface Add eth0 [10.131.0.57/23] from ovn-kubernetes
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:47 +0000 UTC Normal Pod simple-prod-query-674c66d797-l95tl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod check-span-6spr5 Binding Scheduled Successfully assigned kuttl-test-polite-collie/check-span-6spr5 to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod check-span-6spr5 AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod check-span-6spr5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod check-span-6spr5.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod check-span-6spr5.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-6spr5 job-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod report-span-cglw9 Binding Scheduled Successfully assigned kuttl-test-polite-collie/report-span-cglw9 to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod report-span-cglw9 AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod report-span-cglw9.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod report-span-cglw9.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Pod report-span-cglw9.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:51 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-cglw9 job-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:10:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | 2023-09-25 07:11:02 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:11:02 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-polite-collie
=== CONT kuttl/harness/examples-simple-prod
logger.go:42: 07:11:36 | examples-simple-prod | Creating namespace: kuttl-test-careful-jennet
logger.go:42: 07:11:36 | examples-simple-prod/1-install | starting test step 1-install
logger.go:42: 07:11:36 | examples-simple-prod/1-install | Jaeger:kuttl-test-careful-jennet/simple-prod created
logger.go:42: 07:12:11 | examples-simple-prod/1-install | test step completed 1-install
logger.go:42: 07:12:11 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:12:11 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 07:12:13 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:12:20 | examples-simple-prod/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:12:21 | examples-simple-prod/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:12:21 | examples-simple-prod/2-smoke-test | job.batch/report-span created
logger.go:42: 07:12:21 | examples-simple-prod/2-smoke-test | job.batch/check-span created
logger.go:42: 07:12:33 | examples-simple-prod/2-smoke-test | test step completed 2-smoke-test
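Every smoke test in this suite follows the pattern logged above: get-token.sh fetches a token for the e2e-test service account, gomplate renders a report-span/check-span Job pair from a shared template, and kubectl applies it; the step passes once check-span, which polls the query endpoint for the reported spans, completes. A rough, hypothetical replay of the logged commands (values copied from the log, ASSERT_IMG elided for brevity):

    NAMESPACE=kuttl-test-careful-jennet
    JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 \
    MOUNT_SECRET=e2e-test \
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
    # the harness then waits for the Jobs; roughly equivalent to:
    kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=120s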
logger.go:42: 07:12:33 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-careful-jennet:
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:41 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4686b5 to 1 deployment-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:42 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4686b5 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx replicaset-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx Binding Scheduled Successfully assigned kuttl-test-careful-jennet/elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:52 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:11:57 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcarefuljennetsimpleprod-1-794b4ps5lx.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:08 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-h7rhd Binding Scheduled Successfully assigned kuttl-test-careful-jennet/simple-prod-collector-5499b86c46-h7rhd to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:08 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulCreate Created pod: simple-prod-collector-5499b86c46-h7rhd replicaset-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:08 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5499b86c46 to 1 deployment-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:08 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-78695d7f75 to 1 deployment-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-h7rhd AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-h7rhd.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-h7rhd.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-h7rhd.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6 Binding Scheduled Successfully assigned kuttl-test-careful-jennet/simple-prod-query-78695d7f75-wngz6 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6 AddedInterface Add eth0 [10.131.0.58/23] from ovn-kubernetes
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:09 +0000 UTC Normal ReplicaSet.apps simple-prod-query-78695d7f75 SuccessfulCreate Created pod: simple-prod-query-78695d7f75-wngz6 replicaset-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:17 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr Binding Scheduled Successfully assigned kuttl-test-careful-jennet/simple-prod-query-748ccb45dd-6nsrr to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:17 +0000 UTC Normal ReplicaSet.apps simple-prod-query-748ccb45dd SuccessfulCreate Created pod: simple-prod-query-748ccb45dd-6nsrr replicaset-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:17 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:17 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:17 +0000 UTC Normal Pod simple-prod-query-78695d7f75-wngz6.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:17 +0000 UTC Normal ReplicaSet.apps simple-prod-query-78695d7f75 SuccessfulDelete Deleted pod: simple-prod-query-78695d7f75-wngz6 replicaset-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:17 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-78695d7f75 to 0 from 1 deployment-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:17 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-748ccb45dd to 1 deployment-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr AddedInterface Add eth0 [10.131.0.59/23] from ovn-kubernetes
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:18 +0000 UTC Normal Pod simple-prod-query-748ccb45dd-6nsrr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:21 +0000 UTC Normal Pod check-span-blk4b Binding Scheduled Successfully assigned kuttl-test-careful-jennet/check-span-blk4b to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:21 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-blk4b job-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:21 +0000 UTC Normal Pod report-span-rzwr4 Binding Scheduled Successfully assigned kuttl-test-careful-jennet/report-span-rzwr4 to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:21 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-rzwr4 job-controller
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:22 +0000 UTC Normal Pod check-span-blk4b AddedInterface Add eth0 [10.128.2.56/23] from ovn-kubernetes
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:22 +0000 UTC Normal Pod check-span-blk4b.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:22 +0000 UTC Normal Pod check-span-blk4b.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:22 +0000 UTC Normal Pod check-span-blk4b.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:22 +0000 UTC Normal Pod report-span-rzwr4 AddedInterface Add eth0 [10.128.2.55/23] from ovn-kubernetes
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:22 +0000 UTC Normal Pod report-span-rzwr4.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:22 +0000 UTC Normal Pod report-span-rzwr4.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:22 +0000 UTC Normal Pod report-span-rzwr4.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:12:33 | examples-simple-prod | 2023-09-25 07:12:33 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:12:33 | examples-simple-prod | Deleting namespace: kuttl-test-careful-jennet
=== CONT kuttl/harness/examples-business-application-injected-sidecar
logger.go:42: 07:12:45 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-loyal-cicada
logger.go:42: 07:12:45 | examples-business-application-injected-sidecar/0-install | starting test step 0-install
logger.go:42: 07:12:46 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-loyal-cicada/myapp created
logger.go:42: 07:12:46 | examples-business-application-injected-sidecar/0-install | test step completed 0-install
logger.go:42: 07:12:46 | examples-business-application-injected-sidecar/1-install | starting test step 1-install
logger.go:42: 07:12:46 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-loyal-cicada/simplest created
logger.go:42: 07:12:56 | examples-business-application-injected-sidecar/1-install | test step completed 1-install
logger.go:42: 07:12:56 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:12:56 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 07:12:57 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:13:04 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:13:04 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:13:05 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created
logger.go:42: 07:13:05 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test
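This test creates the business application ("myapp") before the Jaeger instance ("simplest"), so the operator has to inject a jaeger-agent sidecar into an already-running Deployment; the second myapp ReplicaSet in the events below is that injection rollout. A minimal sketch of opting a Deployment into injection, assuming the usual Jaeger Operator annotation:

    # Ask the operator to inject a jaeger-agent sidecar into this Deployment
    kubectl annotate deployment myapp sidecar.jaegertracing.io/inject=true -n "$NAMESPACE"
    # After the rollout the pod should list both containers: myapp and jaeger-agent
    kubectl get pods -n "$NAMESPACE" -o jsonpath='{.items[*].spec.containers[*].name}'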
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-loyal-cicada:
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:46 +0000 UTC Normal Pod myapp-7c764668bd-8m476 Binding Scheduled Successfully assigned kuttl-test-loyal-cicada/myapp-7c764668bd-8m476 to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:46 +0000 UTC Normal ReplicaSet.apps myapp-7c764668bd SuccessfulCreate Created pod: myapp-7c764668bd-8m476 replicaset-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:46 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-7c764668bd to 1 deployment-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:47 +0000 UTC Normal Pod myapp-7c764668bd-8m476 AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:47 +0000 UTC Normal Pod myapp-7c764668bd-8m476.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:49 +0000 UTC Normal Pod myapp-787dcd4766-xcvbx Binding Scheduled Successfully assigned kuttl-test-loyal-cicada/myapp-787dcd4766-xcvbx to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:49 +0000 UTC Normal ReplicaSet.apps myapp-787dcd4766 SuccessfulCreate Created pod: myapp-787dcd4766-xcvbx replicaset-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:49 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-787dcd4766 to 1 deployment-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:50 +0000 UTC Warning Pod myapp-787dcd4766-xcvbx FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:50 +0000 UTC Warning Pod myapp-787dcd4766-xcvbx FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:50 +0000 UTC Normal Pod myapp-7c764668bd-8m476.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.606239965s (3.606250955s including waiting) kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:50 +0000 UTC Normal Pod myapp-7c764668bd-8m476.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:50 +0000 UTC Normal Pod myapp-7c764668bd-8m476.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:54 +0000 UTC Normal Pod simplest-dc6c7475-k2chz Binding Scheduled Successfully assigned kuttl-test-loyal-cicada/simplest-dc6c7475-k2chz to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:54 +0000 UTC Normal ReplicaSet.apps simplest-dc6c7475 SuccessfulCreate Created pod: simplest-dc6c7475-k2chz replicaset-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:54 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-dc6c7475 to 1 deployment-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:55 +0000 UTC Normal Pod simplest-dc6c7475-k2chz AddedInterface Add eth0 [10.128.2.58/23] from ovn-kubernetes
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:55 +0000 UTC Normal Pod simplest-dc6c7475-k2chz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:55 +0000 UTC Normal Pod simplest-dc6c7475-k2chz.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:55 +0000 UTC Normal Pod simplest-dc6c7475-k2chz.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:55 +0000 UTC Normal Pod simplest-dc6c7475-k2chz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:55 +0000 UTC Normal Pod simplest-dc6c7475-k2chz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:55 +0000 UTC Normal Pod simplest-dc6c7475-k2chz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:58 +0000 UTC Normal Pod myapp-787dcd4766-xcvbx AddedInterface Add eth0 [10.131.0.60/23] from ovn-kubernetes
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:58 +0000 UTC Normal Pod myapp-787dcd4766-xcvbx.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:12:58 +0000 UTC Warning Pod myapp-7c764668bd-8m476.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.128.2.57:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:01 +0000 UTC Normal Pod myapp-787dcd4766-xcvbx.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.753768814s (3.753784294s including waiting) kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:01 +0000 UTC Normal Pod simplest-dc6c7475-k2chz.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:01 +0000 UTC Normal Pod simplest-dc6c7475-k2chz.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:01 +0000 UTC Normal ReplicaSet.apps simplest-dc6c7475 SuccessfulDelete Deleted pod: simplest-dc6c7475-k2chz replicaset-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:01 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-dc6c7475 to 0 from 1 deployment-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal Pod myapp-787dcd4766-xcvbx.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal Pod myapp-787dcd4766-xcvbx.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal Pod myapp-787dcd4766-xcvbx.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal Pod myapp-787dcd4766-xcvbx.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal Pod myapp-787dcd4766-xcvbx.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal Pod simplest-7585f8c4b9-tc6dh Binding Scheduled Successfully assigned kuttl-test-loyal-cicada/simplest-7585f8c4b9-tc6dh to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal Pod simplest-7585f8c4b9-tc6dh AddedInterface Add eth0 [10.128.2.59/23] from ovn-kubernetes
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal Pod simplest-7585f8c4b9-tc6dh.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal ReplicaSet.apps simplest-7585f8c4b9 SuccessfulCreate Created pod: simplest-7585f8c4b9-tc6dh replicaset-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:02 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-7585f8c4b9 to 1 deployment-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:03 +0000 UTC Normal Pod myapp-7c764668bd-8m476.spec.containers{myapp} Killing Stopping container myapp kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:03 +0000 UTC Normal ReplicaSet.apps myapp-7c764668bd SuccessfulDelete Deleted pod: myapp-7c764668bd-8m476 replicaset-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:03 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-7c764668bd to 0 from 1 deployment-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:03 +0000 UTC Normal Pod simplest-7585f8c4b9-tc6dh.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:03 +0000 UTC Normal Pod simplest-7585f8c4b9-tc6dh.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:03 +0000 UTC Normal Pod simplest-7585f8c4b9-tc6dh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:03 +0000 UTC Normal Pod simplest-7585f8c4b9-tc6dh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:03 +0000 UTC Normal Pod simplest-7585f8c4b9-tc6dh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod check-span-kd77c Binding Scheduled Successfully assigned kuttl-test-loyal-cicada/check-span-kd77c to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod check-span-kd77c AddedInterface Add eth0 [10.128.2.60/23] from ovn-kubernetes
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod check-span-kd77c.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod check-span-kd77c.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod check-span-kd77c.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-kd77c job-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod report-span-mqqwz Binding Scheduled Successfully assigned kuttl-test-loyal-cicada/report-span-mqqwz to ip-10-0-49-86.ec2.internal default-scheduler
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod report-span-mqqwz AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod report-span-mqqwz.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod report-span-mqqwz.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Pod report-span-mqqwz.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:05 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-mqqwz job-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:09 +0000 UTC Warning Pod myapp-787dcd4766-xcvbx.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.131.0.60:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | 2023-09-25 07:13:16 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:13:17 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-loyal-cicada
=== CONT kuttl/harness/examples-openshift-with-htpasswd
logger.go:42: 07:13:29 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:13:29 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:13:29 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-sincere-walrus
logger.go:42: 07:13:29 | examples-openshift-with-htpasswd/0-install | starting test step 0-install
logger.go:42: 07:13:29 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-sincere-walrus/htpasswd created
logger.go:42: 07:13:29 | examples-openshift-with-htpasswd/0-install | test step completed 0-install
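Step 0 only creates an htpasswd Secret; the Jaeger instance installed next points its OpenShift oauth-proxy at it for static basic authentication. A sketch of producing an equivalent Secret by hand, assuming bcrypt hashing and the awesomeuser/awesomepassword pair exercised in step 4 (file layout assumed):

    # Generate a bcrypt htpasswd file and wrap it in a Secret
    htpasswd -cbB ./htpasswd awesomeuser awesomepassword
    kubectl create secret generic htpasswd --from-file=htpasswd=./htpasswd -n kuttl-test-sincere-walrus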
logger.go:42: 07:13:29 | examples-openshift-with-htpasswd/1-install | starting test step 1-install
logger.go:42: 07:13:29 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-sincere-walrus/with-htpasswd created
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/1-install | test step completed 1-install
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:13:35 | examples-openshift-with-htpasswd/2-check-unsecured | HTTP response is 503. 403 expected. Waiting 10 s
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 3/30 the https://with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured
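assert-jaeger-http-code.sh retries until the route returns the expected status, so the 503 on the second try is just the OpenShift router still converging on the new Route. A hypothetical one-liner equivalent of the unauthenticated 403 check above:

    HOST=$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE")
    code=$(curl -sk -o /dev/null -w '%{http_code}' "https://$HOST/search")
    test "$code" = 403 && echo "curl response asserted properly"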
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 07:13:45 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd]
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-sincere-walrus.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized
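Steps 3 and 4 run the same script with different credentials: wronguser/wrongpassword must keep returning 403, while awesomeuser/awesomepassword (the pair stored in the htpasswd Secret) must reach the UI with 200. The basic-auth variant of the earlier sketch:

    code=$(curl -sk -u awesomeuser:awesomepassword -o /dev/null -w '%{http_code}' "https://$HOST/search")
    test "$code" = 200 && echo "curl response asserted properly"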
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-sincere-walrus:
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:32 +0000 UTC Normal Pod with-htpasswd-6987687bbc-hj4wx Binding Scheduled Successfully assigned kuttl-test-sincere-walrus/with-htpasswd-6987687bbc-hj4wx to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:32 +0000 UTC Normal ReplicaSet.apps with-htpasswd-6987687bbc SuccessfulCreate Created pod: with-htpasswd-6987687bbc-hj4wx replicaset-controller
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:32 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-6987687bbc to 1 deployment-controller
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:33 +0000 UTC Normal Pod with-htpasswd-6987687bbc-hj4wx AddedInterface Add eth0 [10.128.2.61/23] from ovn-kubernetes
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:33 +0000 UTC Normal Pod with-htpasswd-6987687bbc-hj4wx.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:33 +0000 UTC Normal Pod with-htpasswd-6987687bbc-hj4wx.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:33 +0000 UTC Normal Pod with-htpasswd-6987687bbc-hj4wx.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:33 +0000 UTC Normal Pod with-htpasswd-6987687bbc-hj4wx.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:33 +0000 UTC Normal Pod with-htpasswd-6987687bbc-hj4wx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | 2023-09-25 07:13:33 +0000 UTC Normal Pod with-htpasswd-6987687bbc-hj4wx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:13:46 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-sincere-walrus
=== CONT kuttl/harness/examples-openshift-agent-as-daemonset
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset | Creating namespace: kuttl-test-legible-koala
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset/0-install | starting test step 0-install
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-legible-koala/jaeger-agent-daemonset created
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset/0-install | test step completed 0-install
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset/1-add-policy | starting test step 1-add-policy
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset]
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset"
logger.go:42: 07:13:52 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c sleep 5]
logger.go:42: 07:13:57 | examples-openshift-agent-as-daemonset/1-add-policy | test step completed 1-add-policy
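Running the agent as a DaemonSet means binding host ports, which OpenShift's default restricted SCC forbids; hence step 0 ships a dedicated SecurityContextConstraints object and step 1 grants it to the agent's ServiceAccount before the Jaeger instance is created. A sketch of the grant plus a verification query (assuming the SCC is what enables host ports here):

    oc adm policy --namespace "$NAMESPACE" add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset
    oc get scc daemonset-with-hostport -o jsonpath='{.allowHostPorts}'   # expect: true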
logger.go:42: 07:14:12 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_NAME=order ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JOB_NUMBER=00 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o find-service-00-job.yaml] logger.go:42: 07:14:13 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c kubectl create -f find-service-00-job.yaml -n $NAMESPACE] logger.go:42: 07:14:13 | examples-openshift-agent-as-daemonset/4-find-service | job.batch/00-find-service created logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset/4-find-service | test step completed 4-find-service logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | examples-openshift-agent-as-daemonset events from ns kuttl-test-legible-koala: logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:00 +0000 UTC Normal Pod agent-as-daemonset-76597d8b68-bbntf Binding Scheduled Successfully assigned kuttl-test-legible-koala/agent-as-daemonset-76597d8b68-bbntf to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:00 +0000 UTC Warning Pod agent-as-daemonset-76597d8b68-bbntf FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-ui-oauth-proxy-tls" : secret "agent-as-daemonset-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:00 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-76597d8b68 SuccessfulCreate Created pod: agent-as-daemonset-76597d8b68-bbntf replicaset-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:00 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-76597d8b68 to 1 deployment-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:01 +0000 UTC Normal Pod agent-as-daemonset-76597d8b68-bbntf AddedInterface Add eth0 [10.128.2.62/23] from ovn-kubernetes logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:01 +0000 UTC Normal Pod agent-as-daemonset-76597d8b68-bbntf.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:01 +0000 UTC Normal Pod agent-as-daemonset-76597d8b68-bbntf.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:01 +0000 UTC Normal Pod agent-as-daemonset-76597d8b68-bbntf.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:01 +0000 UTC Normal Pod agent-as-daemonset-76597d8b68-bbntf.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:01 +0000 UTC Normal Pod 
agent-as-daemonset-76597d8b68-bbntf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:01 +0000 UTC Normal Pod agent-as-daemonset-76597d8b68-bbntf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-2lhnt Binding Scheduled Successfully assigned kuttl-test-legible-koala/agent-as-daemonset-agent-daemonset-2lhnt to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz5p4 Binding Scheduled Successfully assigned kuttl-test-legible-koala/agent-as-daemonset-agent-daemonset-jz5p4 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-rbgfb Binding Scheduled Successfully assigned kuttl-test-legible-koala/agent-as-daemonset-agent-daemonset-rbgfb to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-2lhnt daemonset-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-jz5p4 daemonset-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-rbgfb daemonset-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-7qnnc Binding Scheduled Successfully assigned kuttl-test-legible-koala/vertx-create-span-sidecar-54946f4fd-7qnnc to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-7qnnc AddedInterface Add eth0 [10.131.0.61/23] from ovn-kubernetes logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-7qnnc.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-7qnnc.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-7qnnc.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-54946f4fd SuccessfulCreate Created pod: vertx-create-span-sidecar-54946f4fd-7qnnc replicaset-controller logger.go:42: 07:14:41 | 
examples-openshift-agent-as-daemonset | 2023-09-25 07:14:03 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-54946f4fd to 1 deployment-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-2lhnt AddedInterface Add eth0 [10.131.0.62/23] from ovn-kubernetes logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-2lhnt.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-2lhnt.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-2lhnt.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz5p4 AddedInterface Add eth0 [10.128.2.63/23] from ovn-kubernetes logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz5p4.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz5p4.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz5p4.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-rbgfb AddedInterface Add eth0 [10.129.2.56/23] from ovn-kubernetes logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:04 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-rbgfb.spec.containers{jaeger-agent-daemonset} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:07 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-rbgfb.spec.containers{jaeger-agent-daemonset} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" in 3.170692718s (3.170708498s including waiting) kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:07 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-rbgfb.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet logger.go:42: 07:14:41 | 
examples-openshift-agent-as-daemonset | 2023-09-25 07:14:07 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-rbgfb.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:09 +0000 UTC Normal Pod agent-as-daemonset-76597d8b68-bbntf.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:09 +0000 UTC Normal Pod agent-as-daemonset-76597d8b68-bbntf.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:09 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-76597d8b68 SuccessfulDelete Deleted pod: agent-as-daemonset-76597d8b68-bbntf replicaset-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:09 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-76597d8b68 to 0 from 1 deployment-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:10 +0000 UTC Normal Pod agent-as-daemonset-658787fbfd-5d98z Binding Scheduled Successfully assigned kuttl-test-legible-koala/agent-as-daemonset-658787fbfd-5d98z to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:10 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-658787fbfd SuccessfulCreate Created pod: agent-as-daemonset-658787fbfd-5d98z replicaset-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:10 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-658787fbfd to 1 deployment-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:11 +0000 UTC Normal Pod agent-as-daemonset-658787fbfd-5d98z AddedInterface Add eth0 [10.128.2.64/23] from ovn-kubernetes logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:11 +0000 UTC Normal Pod agent-as-daemonset-658787fbfd-5d98z.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:11 +0000 UTC Normal Pod agent-as-daemonset-658787fbfd-5d98z.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:11 +0000 UTC Normal Pod agent-as-daemonset-658787fbfd-5d98z.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:11 +0000 UTC Normal Pod agent-as-daemonset-658787fbfd-5d98z.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:11 +0000 UTC Normal Pod agent-as-daemonset-658787fbfd-5d98z.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:11 +0000 UTC Normal Pod 
agent-as-daemonset-658787fbfd-5d98z.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:11 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-7qnnc.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.61:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:11 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-7qnnc.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.61:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:13 +0000 UTC Normal Pod 00-find-service-9bzrv Binding Scheduled Successfully assigned kuttl-test-legible-koala/00-find-service-9bzrv to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:13 +0000 UTC Normal Pod 00-find-service-9bzrv AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:13 +0000 UTC Normal Pod 00-find-service-9bzrv.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:13 +0000 UTC Normal Pod 00-find-service-9bzrv.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:13 +0000 UTC Normal Pod 00-find-service-9bzrv.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:13 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-9bzrv job-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:13 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-7qnnc.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:14 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-7qnnc.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.61:8080/": read tcp 10.131.0.2:48012->10.131.0.61:8080: read: connection reset by peer kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:14 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-7qnnc.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.61:8080/": dial tcp 10.131.0.61:8080: connect: connection refused kubelet logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | 2023-09-25 07:14:25 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-7qnnc.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.61:8080/": read tcp 10.131.0.2:52594->10.131.0.61:8080: read: connection reset by peer kubelet logger.go:42: 07:14:41 | 
examples-openshift-agent-as-daemonset | 2023-09-25 07:14:41 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 07:14:41 | examples-openshift-agent-as-daemonset | Deleting namespace: kuttl-test-legible-koala === CONT kuttl/harness/examples-collector-with-priority-class logger.go:42: 07:14:47 | examples-collector-with-priority-class | Creating namespace: kuttl-test-primary-cub logger.go:42: 07:14:47 | examples-collector-with-priority-class/0-install | starting test step 0-install logger.go:42: 07:14:47 | examples-collector-with-priority-class/0-install | PriorityClass:/collector-high-priority created logger.go:42: 07:14:47 | examples-collector-with-priority-class/0-install | Jaeger:kuttl-test-primary-cub/collector-with-high-priority created logger.go:42: 07:14:53 | examples-collector-with-priority-class/0-install | test step completed 0-install logger.go:42: 07:14:53 | examples-collector-with-priority-class/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:14:53 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE collector-with-high-priority /dev/null] logger.go:42: 07:14:55 | examples-collector-with-priority-class/1-smoke-test | Warning: resource jaegers/collector-with-high-priority is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:15:01 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:15:01 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:15:02 | examples-collector-with-priority-class/1-smoke-test | job.batch/report-span created logger.go:42: 07:15:02 | examples-collector-with-priority-class/1-smoke-test | job.batch/check-span created logger.go:42: 07:15:14 | examples-collector-with-priority-class/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:15:14 | examples-collector-with-priority-class | examples-collector-with-priority-class events from ns kuttl-test-primary-cub: logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:51 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk Binding Scheduled Successfully assigned kuttl-test-primary-cub/collector-with-high-priority-748ddc5d4b-tfrvk to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:51 +0000 UTC Warning Pod collector-with-high-priority-748ddc5d4b-tfrvk FailedMount MountVolume.SetUp failed for volume "collector-with-high-priority-collector-tls-config-volume" : secret "collector-with-high-priority-collector-headless-tls" not found kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 
07:14:51 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-748ddc5d4b SuccessfulCreate Created pod: collector-with-high-priority-748ddc5d4b-tfrvk replicaset-controller logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:51 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-748ddc5d4b to 1 deployment-controller logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:52 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk AddedInterface Add eth0 [10.131.0.63/23] from ovn-kubernetes logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:52 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:52 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:52 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:52 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:52 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:52 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:57 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:57 +0000 UTC Normal Pod collector-with-high-priority-748ddc5d4b-tfrvk.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:57 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-748ddc5d4b SuccessfulDelete Deleted pod: collector-with-high-priority-748ddc5d4b-tfrvk replicaset-controller logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:57 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled down replica set collector-with-high-priority-748ddc5d4b to 0 from 1 deployment-controller logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:58 +0000 UTC Normal Pod collector-with-high-priority-fd988566d-j8j2s Binding Scheduled Successfully assigned kuttl-test-primary-cub/collector-with-high-priority-fd988566d-j8j2s to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 
07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:58 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-fd988566d SuccessfulCreate Created pod: collector-with-high-priority-fd988566d-j8j2s replicaset-controller logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:58 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-fd988566d to 1 deployment-controller logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:59 +0000 UTC Normal Pod collector-with-high-priority-fd988566d-j8j2s AddedInterface Add eth0 [10.128.2.65/23] from ovn-kubernetes logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:59 +0000 UTC Normal Pod collector-with-high-priority-fd988566d-j8j2s.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:59 +0000 UTC Normal Pod collector-with-high-priority-fd988566d-j8j2s.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:59 +0000 UTC Normal Pod collector-with-high-priority-fd988566d-j8j2s.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:59 +0000 UTC Normal Pod collector-with-high-priority-fd988566d-j8j2s.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:59 +0000 UTC Normal Pod collector-with-high-priority-fd988566d-j8j2s.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:14:59 +0000 UTC Normal Pod collector-with-high-priority-fd988566d-j8j2s.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod check-span-sxcqz Binding Scheduled Successfully assigned kuttl-test-primary-cub/check-span-sxcqz to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod check-span-sxcqz AddedInterface Add eth0 [10.131.0.65/23] from ovn-kubernetes logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod check-span-sxcqz.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod check-span-sxcqz.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod check-span-sxcqz.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:15:14 | 
examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-sxcqz job-controller logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod report-span-2hsv8 Binding Scheduled Successfully assigned kuttl-test-primary-cub/report-span-2hsv8 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod report-span-2hsv8 AddedInterface Add eth0 [10.131.0.64/23] from ovn-kubernetes logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod report-span-2hsv8.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod report-span-2hsv8.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Pod report-span-2hsv8.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:02 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2hsv8 job-controller logger.go:42: 07:15:14 | examples-collector-with-priority-class | 2023-09-25 07:15:13 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:15:14 | examples-collector-with-priority-class | Deleting namespace: kuttl-test-primary-cub === CONT kuttl/harness/examples-all-in-one-with-options logger.go:42: 07:15:26 | examples-all-in-one-with-options | Creating namespace: kuttl-test-thorough-satyr logger.go:42: 07:15:26 | examples-all-in-one-with-options/0-install | starting test step 0-install logger.go:42: 07:15:26 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-thorough-satyr/my-jaeger created logger.go:42: 07:15:32 | examples-all-in-one-with-options/0-install | test step completed 0-install logger.go:42: 07:15:32 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:15:32 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:15:33 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
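Each example test above finishes with the same smoke-test pattern: obtain a token for the e2e-test ServiceAccount, render a Job manifest from a gomplate template, and apply it to the test namespace. A condensed sketch of that pattern, using the values from the examples-all-in-one-with-options run that follows (the gomplate and kubectl invocations are taken verbatim from the log; the template body is not shown, and the variable names suggest, without confirming it, that the rendered report-span Job sends a test span to the collector endpoint while the check-span Job verifies it through the query endpoint):

    # ASSERT_IMG is the CI assert image used by both rendered Jobs
    ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 \
    JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger \
    MOUNT_SECRET=e2e-test \
    /tmp/jaeger-tests/bin/gomplate \
        -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template \
        -o smoke-test-job.yaml
    # apply the rendered Jobs; the step passes once check-span reports
    # Completed in the job-controller events
    kubectl apply -f smoke-test-job.yaml -n $NAMESPACE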
logger.go:42: 07:15:39 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:15:40 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:15:40 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created logger.go:42: 07:15:40 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created logger.go:42: 07:15:51 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:15:51 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-thorough-satyr: logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:29 +0000 UTC Normal Pod my-jaeger-647b8d59b4-ztx6v Binding Scheduled Successfully assigned kuttl-test-thorough-satyr/my-jaeger-647b8d59b4-ztx6v to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:29 +0000 UTC Normal ReplicaSet.apps my-jaeger-647b8d59b4 SuccessfulCreate Created pod: my-jaeger-647b8d59b4-ztx6v replicaset-controller logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:29 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-647b8d59b4 to 1 deployment-controller logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:30 +0000 UTC Normal Pod my-jaeger-647b8d59b4-ztx6v AddedInterface Add eth0 [10.128.2.66/23] from ovn-kubernetes logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:30 +0000 UTC Normal Pod my-jaeger-647b8d59b4-ztx6v.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:30 +0000 UTC Normal Pod my-jaeger-647b8d59b4-ztx6v.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:30 +0000 UTC Normal Pod my-jaeger-647b8d59b4-ztx6v.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:30 +0000 UTC Normal Pod my-jaeger-647b8d59b4-ztx6v.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:30 +0000 UTC Normal Pod my-jaeger-647b8d59b4-ztx6v.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:30 +0000 UTC Normal Pod my-jaeger-647b8d59b4-ztx6v.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:34 +0000 UTC Normal Pod 
my-jaeger-647b8d59b4-ztx6v.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:34 +0000 UTC Normal Pod my-jaeger-647b8d59b4-ztx6v.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:34 +0000 UTC Normal ReplicaSet.apps my-jaeger-647b8d59b4 SuccessfulDelete Deleted pod: my-jaeger-647b8d59b4-ztx6v replicaset-controller logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:34 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-647b8d59b4 to 0 from 1 deployment-controller logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:35 +0000 UTC Normal Pod my-jaeger-5548cd44fc-sqzrg Binding Scheduled Successfully assigned kuttl-test-thorough-satyr/my-jaeger-5548cd44fc-sqzrg to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:35 +0000 UTC Normal ReplicaSet.apps my-jaeger-5548cd44fc SuccessfulCreate Created pod: my-jaeger-5548cd44fc-sqzrg replicaset-controller logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:35 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-5548cd44fc to 1 deployment-controller logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:36 +0000 UTC Normal Pod my-jaeger-5548cd44fc-sqzrg AddedInterface Add eth0 [10.128.2.67/23] from ovn-kubernetes logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:36 +0000 UTC Normal Pod my-jaeger-5548cd44fc-sqzrg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:36 +0000 UTC Normal Pod my-jaeger-5548cd44fc-sqzrg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:36 +0000 UTC Normal Pod my-jaeger-5548cd44fc-sqzrg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:36 +0000 UTC Normal Pod my-jaeger-5548cd44fc-sqzrg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:36 +0000 UTC Normal Pod my-jaeger-5548cd44fc-sqzrg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:36 +0000 UTC Normal Pod my-jaeger-5548cd44fc-sqzrg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:40 +0000 UTC Normal Pod check-span-qtt44 Binding Scheduled Successfully assigned kuttl-test-thorough-satyr/check-span-qtt44 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:40 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-qtt44 job-controller logger.go:42: 07:15:51 | 
examples-all-in-one-with-options | 2023-09-25 07:15:40 +0000 UTC Normal Pod report-span-vhtx7 Binding Scheduled Successfully assigned kuttl-test-thorough-satyr/report-span-vhtx7 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:40 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-vhtx7 job-controller logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:41 +0000 UTC Normal Pod check-span-qtt44 AddedInterface Add eth0 [10.131.0.67/23] from ovn-kubernetes logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:41 +0000 UTC Normal Pod check-span-qtt44.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:41 +0000 UTC Normal Pod check-span-qtt44.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:41 +0000 UTC Normal Pod check-span-qtt44.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:41 +0000 UTC Normal Pod report-span-vhtx7 AddedInterface Add eth0 [10.131.0.66/23] from ovn-kubernetes logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:41 +0000 UTC Normal Pod report-span-vhtx7.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:41 +0000 UTC Normal Pod report-span-vhtx7.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:41 +0000 UTC Normal Pod report-span-vhtx7.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:15:51 | examples-all-in-one-with-options | 2023-09-25 07:15:51 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:15:51 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-thorough-satyr === CONT kuttl/harness/examples-auto-provision-kafka logger.go:42: 07:16:03 | examples-auto-provision-kafka | Creating namespace: kuttl-test-immune-flea logger.go:42: 07:16:03 | examples-auto-provision-kafka/2-install | starting test step 2-install logger.go:42: 07:16:03 | examples-auto-provision-kafka/2-install | Jaeger:kuttl-test-immune-flea/auto-provision-kafka created logger.go:42: 07:17:06 | examples-auto-provision-kafka/2-install | test step completed 2-install logger.go:42: 07:17:06 | examples-auto-provision-kafka/3- | starting test step 3- logger.go:42: 07:17:34 | examples-auto-provision-kafka/3- | test step completed 3- logger.go:42: 07:17:34 | examples-auto-provision-kafka/4- | starting test step 4- logger.go:42: 07:18:15 | examples-auto-provision-kafka/4- | test step completed 4- logger.go:42: 07:18:15 | examples-auto-provision-kafka/5- | starting test step 5- logger.go:42: 07:18:25 | examples-auto-provision-kafka/5- | test step completed 5- logger.go:42: 07:18:25 | examples-auto-provision-kafka/6-smoke-test | 
starting test step 6-smoke-test logger.go:42: 07:18:25 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provision-kafka /dev/null] logger.go:42: 07:18:26 | examples-auto-provision-kafka/6-smoke-test | Warning: resource jaegers/auto-provision-kafka is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:18:32 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:18:33 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:18:33 | examples-auto-provision-kafka/6-smoke-test | job.batch/report-span created logger.go:42: 07:18:33 | examples-auto-provision-kafka/6-smoke-test | job.batch/check-span created logger.go:42: 07:18:45 | examples-auto-provision-kafka/6-smoke-test | test step completed 6-smoke-test logger.go:42: 07:18:45 | examples-auto-provision-kafka | examples-auto-provision-kafka events from ns kuttl-test-immune-flea: logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:10 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-c857fff47 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9 replicaset-controller logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9 Binding Scheduled Successfully assigned kuttl-test-immune-flea/elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9 AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:10 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:10 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-c857fff47 to 1 deployment-controller logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:20 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:26 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestimmunefleaautoprovisionkafka-1-j55m9.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:37 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:38 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-zookeeper NoPods No matching pods found controllermanager logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:38 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:38 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-immune-flea/data-auto-provision-kafka-zookeeper-0" logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:41 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-634cc5ab-3bc0-4148-a0e9-72702fee2eea logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:42 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-immune-flea/auto-provision-kafka-zookeeper-0 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:44 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-634cc5ab-3bc0-4148-a0e9-72702fee2eea" attachdetach-controller logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:45 +0000 UTC Normal Pod 
auto-provision-kafka-zookeeper-0 AddedInterface Add eth0 [10.131.0.68/23] from ovn-kubernetes logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:45 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:45 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:16:45 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:06 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-kafka NoPods No matching pods found controllermanager logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:06 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:06 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:06 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-immune-flea/data-0-auto-provision-kafka-kafka-0" logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:10 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 Binding Scheduled Successfully assigned kuttl-test-immune-flea/auto-provision-kafka-kafka-0 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:10 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-b5aeae97-d1e2-497a-a072-6b28e0f1736e logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:13 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b5aeae97-d1e2-497a-a072-6b28e0f1736e" attachdetach-controller logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:14 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 AddedInterface Add eth0 [10.131.0.69/23] from ovn-kubernetes logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:14 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:14 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:14 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Started Started container 
kafka kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:34 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l Binding Scheduled Successfully assigned kuttl-test-immune-flea/auto-provision-kafka-entity-operator-65984cbf8c-8rt9l to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:34 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l AddedInterface Add eth0 [10.128.2.68/23] from ovn-kubernetes
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:34 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{topic-operator} Pulling Pulling image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:34 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-entity-operator-65984cbf8c SuccessfulCreate Created pod: auto-provision-kafka-entity-operator-65984cbf8c-8rt9l replicaset-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:34 +0000 UTC Normal Deployment.apps auto-provision-kafka-entity-operator ScalingReplicaSet Scaled up replica set auto-provision-kafka-entity-operator-65984cbf8c to 1 deployment-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:41 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{topic-operator} Pulled Successfully pulled image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" in 6.986665094s (6.986677074s including waiting) kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{tls-sidecar} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:50 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{tls-sidecar} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" in 8.778439148s (8.778457559s including waiting) kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:51 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:17:51 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-65984cbf8c-8rt9l.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-collector-796d488f74-btrhr Binding Scheduled Successfully assigned kuttl-test-immune-flea/auto-provision-kafka-collector-796d488f74-btrhr to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-collector-796d488f74-btrhr AddedInterface Add eth0 [10.131.0.70/23] from ovn-kubernetes
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-collector-796d488f74-btrhr.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-collector-796d488f74-btrhr.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-collector-796d488f74-btrhr.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-collector-796d488f74 SuccessfulCreate Created pod: auto-provision-kafka-collector-796d488f74-btrhr replicaset-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Deployment.apps auto-provision-kafka-collector ScalingReplicaSet Scaled up replica set auto-provision-kafka-collector-796d488f74 to 1 deployment-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-ingester-6fc58b8bd5-ndl86 Binding Scheduled Successfully assigned kuttl-test-immune-flea/auto-provision-kafka-ingester-6fc58b8bd5-ndl86 to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-ingester-6fc58b8bd5-ndl86 AddedInterface Add eth0 [10.131.0.71/23] from ovn-kubernetes
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-ingester-6fc58b8bd5-ndl86.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-ingester-6fc58b8bd5 SuccessfulCreate Created pod: auto-provision-kafka-ingester-6fc58b8bd5-ndl86 replicaset-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Deployment.apps auto-provision-kafka-ingester ScalingReplicaSet Scaled up replica set auto-provision-kafka-ingester-6fc58b8bd5 to 1 deployment-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx Binding Scheduled Successfully assigned kuttl-test-immune-flea/auto-provision-kafka-query-5bf4ddd474-jqgcx to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx AddedInterface Add eth0 [10.128.2.69/23] from ovn-kubernetes
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-5bf4ddd474 SuccessfulCreate Created pod: auto-provision-kafka-query-5bf4ddd474-jqgcx replicaset-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:17 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-5bf4ddd474 to 1 deployment-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:18 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:23 +0000 UTC Normal Pod auto-provision-kafka-ingester-6fc58b8bd5-ndl86.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" in 5.468191753s (5.468204893s including waiting) kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:23 +0000 UTC Normal Pod auto-provision-kafka-ingester-6fc58b8bd5-ndl86.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:23 +0000 UTC Normal Pod auto-provision-kafka-ingester-6fc58b8bd5-ndl86.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:29 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:29 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:29 +0000 UTC Normal Pod auto-provision-kafka-query-5bf4ddd474-jqgcx.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:29 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-5bf4ddd474 SuccessfulDelete Deleted pod: auto-provision-kafka-query-5bf4ddd474-jqgcx replicaset-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:29 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled down replica set auto-provision-kafka-query-5bf4ddd474 to 0 from 1 deployment-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:30 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7 Binding Scheduled Successfully assigned kuttl-test-immune-flea/auto-provision-kafka-query-85b687768b-5mgq7 to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:30 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7 AddedInterface Add eth0 [10.128.2.70/23] from ovn-kubernetes
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:30 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:30 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:30 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:30 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:30 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-85b687768b SuccessfulCreate Created pod: auto-provision-kafka-query-85b687768b-5mgq7 replicaset-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:30 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-85b687768b to 1 deployment-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:31 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:31 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:31 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:31 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:31 +0000 UTC Normal Pod auto-provision-kafka-query-85b687768b-5mgq7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:33 +0000 UTC Normal Pod check-span-6jvnf Binding Scheduled Successfully assigned kuttl-test-immune-flea/check-span-6jvnf to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:33 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-6jvnf job-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:33 +0000 UTC Normal Pod report-span-qb7fp Binding Scheduled Successfully assigned kuttl-test-immune-flea/report-span-qb7fp to ip-10-0-111-85.ec2.internal default-scheduler
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:33 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-qb7fp job-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:34 +0000 UTC Normal Pod check-span-6jvnf AddedInterface Add eth0 [10.131.0.73/23] from ovn-kubernetes
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:34 +0000 UTC Normal Pod check-span-6jvnf.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:34 +0000 UTC Normal Pod check-span-6jvnf.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:34 +0000 UTC Normal Pod check-span-6jvnf.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:34 +0000 UTC Normal Pod report-span-qb7fp AddedInterface Add eth0 [10.131.0.72/23] from ovn-kubernetes
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:34 +0000 UTC Normal Pod report-span-qb7fp.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:34 +0000 UTC Normal Pod report-span-qb7fp.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:34 +0000 UTC Normal Pod report-span-qb7fp.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:18:45 | examples-auto-provision-kafka | 2023-09-25 07:18:45 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:18:45 | examples-auto-provision-kafka | Deleting namespace: kuttl-test-immune-flea
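[Editor's note] The report-span / check-span pair above is the smoke-test pattern every example in this suite reuses: one job sends a test span to the collector, the other polls the Jaeger query API until that span is returned, and the kuttl step only passes once both jobs complete. A minimal sketch of the check side, assuming the query route from this run; the service name smoke-test-service and the TOKEN variable are illustrative assumptions, and the real rendered job authenticates through the oauth-proxy with a token mounted via MOUNT_SECRET=e2e-test:

  # Sketch only: /api/traces?service=... is the Jaeger query HTTP API;
  # -k tolerates the cluster's self-signed certificate.
  JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443
  for i in $(seq 1 30); do
    n=$(curl -ksf -H "Authorization: Bearer $TOKEN" \
      "$JAEGER_QUERY_ENDPOINT/api/traces?service=smoke-test-service" | jq '.data | length')
    [ "${n:-0}" -gt 0 ] && echo "span found" && exit 0
    sleep 2
  done
  echo "no span found" >&2
  exit 1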
=== CONT  kuttl/harness/examples-agent-with-priority-class
logger.go:42: 07:19:03 | examples-agent-with-priority-class | Creating namespace: kuttl-test-certain-leech
logger.go:42: 07:19:03 | examples-agent-with-priority-class/0-install | starting test step 0-install
logger.go:42: 07:19:03 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 07:19:03 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-certain-leech/jaeger-agent-daemonset created
logger.go:42: 07:19:03 | examples-agent-with-priority-class/0-install | test step completed 0-install
logger.go:42: 07:19:03 | examples-agent-with-priority-class/1-install | starting test step 1-install
logger.go:42: 07:19:03 | examples-agent-with-priority-class/1-install | PriorityClass:/high-priority created
logger.go:42: 07:19:03 | examples-agent-with-priority-class/1-install | Jaeger:kuttl-test-certain-leech/agent-as-daemonset created
logger.go:42: 07:19:09 | examples-agent-with-priority-class/1-install | test step completed 1-install
logger.go:42: 07:19:09 | examples-agent-with-priority-class/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:19:09 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 07:19:11 | examples-agent-with-priority-class/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:19:17 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:19:18 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:19:18 | examples-agent-with-priority-class/2-smoke-test | job.batch/report-span created
logger.go:42: 07:19:18 | examples-agent-with-priority-class/2-smoke-test | job.batch/check-span created
logger.go:42: 07:19:30 | examples-agent-with-priority-class/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 07:19:30 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns kuttl-test-certain-leech:
logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:07 +0000 UTC Normal Pod agent-as-daemonset-55d594b54c-tc8tf Binding Scheduled Successfully assigned kuttl-test-certain-leech/agent-as-daemonset-55d594b54c-tc8tf to ip-10-0-56-78.ec2.internal default-scheduler
logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:07 +0000 UTC Normal Pod agent-as-daemonset-55d594b54c-tc8tf AddedInterface Add eth0 [10.128.2.71/23] from ovn-kubernetes
logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:07 +0000 UTC Normal Pod agent-as-daemonset-55d594b54c-tc8tf.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:07 +0000 UTC Normal Pod agent-as-daemonset-55d594b54c-tc8tf.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:07 +0000 UTC Normal Pod agent-as-daemonset-55d594b54c-tc8tf.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:07 +0000 UTC Normal Pod agent-as-daemonset-55d594b54c-tc8tf.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:07 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-55d594b54c SuccessfulCreate Created pod: agent-as-daemonset-55d594b54c-tc8tf replicaset-controller
logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:07 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
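[Editor's note] The FailedCreate warning above is the expected first-pass rejection in this example: the agent pod requests host ports, no stock SCC permits them, and even the daemonset-with-hostport SCC created in step 0 is only usable once the pod's service account is granted access to it. A sketch of how such a grant is typically made, using the SCC and ServiceAccount names from this namespace (the manifest the test itself applies is not shown in this log):

  # Assumed commands, not copied from the test: allow the SA to use the SCC,
  # after which the daemonset-controller's retry can create the pods.
  oc adm policy add-scc-to-user daemonset-with-hostport \
    -z jaeger-agent-daemonset -n kuttl-test-certain-leech
  # Declarative equivalent inside the SCC itself:
  #   users:
  #   - system:serviceaccount:kuttl-test-certain-leech:jaeger-agent-daemonset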
"registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:15 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-7f87c7ccb SuccessfulCreate Created pod: agent-as-daemonset-7f87c7ccb-w5lxg replicaset-controller logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:15 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-7f87c7ccb to 1 deployment-controller logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:16 +0000 UTC Normal Pod agent-as-daemonset-7f87c7ccb-w5lxg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:16 +0000 UTC Normal Pod agent-as-daemonset-7f87c7ccb-w5lxg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:16 +0000 UTC Normal Pod agent-as-daemonset-7f87c7ccb-w5lxg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:16 +0000 UTC Normal Pod agent-as-daemonset-7f87c7ccb-w5lxg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:16 +0000 UTC Normal Pod agent-as-daemonset-7f87c7ccb-w5lxg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod check-span-k4sth Binding Scheduled Successfully assigned kuttl-test-certain-leech/check-span-k4sth to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod check-span-k4sth AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod check-span-k4sth.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod check-span-k4sth.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod check-span-k4sth.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-k4sth job-controller logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod report-span-7z4w2 Binding Scheduled Successfully assigned kuttl-test-certain-leech/report-span-7z4w2 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod report-span-7z4w2 
AddedInterface Add eth0 [10.131.0.74/23] from ovn-kubernetes logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod report-span-7z4w2.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod report-span-7z4w2.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Pod report-span-7z4w2.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:18 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-7z4w2 job-controller logger.go:42: 07:19:30 | examples-agent-with-priority-class | 2023-09-25 07:19:29 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:19:30 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-certain-leech === CONT kuttl/harness/examples-agent-as-daemonset logger.go:42: 07:19:36 | examples-agent-as-daemonset | Creating namespace: kuttl-test-moving-loon logger.go:42: 07:19:36 | examples-agent-as-daemonset/0-install | starting test step 0-install logger.go:42: 07:19:36 | examples-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 07:19:36 | examples-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-moving-loon/jaeger-agent-daemonset created logger.go:42: 07:19:36 | examples-agent-as-daemonset/0-install | test step completed 0-install logger.go:42: 07:19:36 | examples-agent-as-daemonset/1-install | starting test step 1-install logger.go:42: 07:19:36 | examples-agent-as-daemonset/1-install | Jaeger:kuttl-test-moving-loon/agent-as-daemonset created logger.go:42: 07:19:41 | examples-agent-as-daemonset/1-install | test step completed 1-install logger.go:42: 07:19:41 | examples-agent-as-daemonset/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:19:41 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 07:19:42 | examples-agent-as-daemonset/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
logger.go:42: 07:19:49 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:19:49 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:19:49 | examples-agent-as-daemonset/2-smoke-test | job.batch/report-span created logger.go:42: 07:19:49 | examples-agent-as-daemonset/2-smoke-test | job.batch/check-span created logger.go:42: 07:20:01 | examples-agent-as-daemonset/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:20:01 | examples-agent-as-daemonset | examples-agent-as-daemonset events from ns kuttl-test-moving-loon: logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj Binding Scheduled Successfully assigned kuttl-test-moving-loon/agent-as-daemonset-5cc6bc9649-ddtsj to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj AddedInterface Add eth0 [10.128.2.73/23] from ovn-kubernetes logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5cc6bc9649 SuccessfulCreate Created pod: agent-as-daemonset-5cc6bc9649-ddtsj replicaset-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:40 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-5cc6bc9649 to 1 deployment-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:42 
+0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:45 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:45 +0000 UTC Normal Pod agent-as-daemonset-5cc6bc9649-ddtsj.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:45 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5cc6bc9649 SuccessfulDelete Deleted pod: agent-as-daemonset-5cc6bc9649-ddtsj replicaset-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:45 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-5cc6bc9649 to 0 from 1 deployment-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:46 +0000 UTC Normal Pod agent-as-daemonset-64d498cf5c-vfvqn Binding Scheduled Successfully assigned kuttl-test-moving-loon/agent-as-daemonset-64d498cf5c-vfvqn to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:46 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-64d498cf5c SuccessfulCreate Created pod: agent-as-daemonset-64d498cf5c-vfvqn replicaset-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:46 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-64d498cf5c to 1 deployment-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:47 +0000 UTC Normal Pod 
agent-as-daemonset-64d498cf5c-vfvqn AddedInterface Add eth0 [10.131.0.75/23] from ovn-kubernetes logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:47 +0000 UTC Normal Pod agent-as-daemonset-64d498cf5c-vfvqn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:47 +0000 UTC Normal Pod agent-as-daemonset-64d498cf5c-vfvqn.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:47 +0000 UTC Normal Pod agent-as-daemonset-64d498cf5c-vfvqn.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:47 +0000 UTC Normal Pod agent-as-daemonset-64d498cf5c-vfvqn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:47 +0000 UTC Normal Pod agent-as-daemonset-64d498cf5c-vfvqn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:47 +0000 UTC Normal Pod agent-as-daemonset-64d498cf5c-vfvqn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:49 +0000 UTC Normal Pod check-span-nmvcw Binding Scheduled Successfully assigned kuttl-test-moving-loon/check-span-nmvcw to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:49 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-nmvcw job-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:49 +0000 UTC Normal Pod report-span-lj2cz Binding Scheduled Successfully assigned kuttl-test-moving-loon/report-span-lj2cz to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:49 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-lj2cz job-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:50 +0000 UTC Normal Pod check-span-nmvcw AddedInterface Add eth0 [10.129.2.61/23] from ovn-kubernetes logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:50 +0000 UTC Normal Pod check-span-nmvcw.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:50 +0000 UTC Normal Pod check-span-nmvcw.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:50 +0000 UTC Normal Pod check-span-nmvcw.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:50 +0000 UTC Normal Pod report-span-lj2cz AddedInterface Add eth0 [10.128.2.74/23] from ovn-kubernetes logger.go:42: 07:20:01 | 
examples-agent-as-daemonset | 2023-09-25 07:19:50 +0000 UTC Normal Pod report-span-lj2cz.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:50 +0000 UTC Normal Pod report-span-lj2cz.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:19:50 +0000 UTC Normal Pod report-span-lj2cz.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:20:01 | examples-agent-as-daemonset | 2023-09-25 07:20:00 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:20:01 | examples-agent-as-daemonset | Deleting namespace: kuttl-test-moving-loon === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (950.20s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.74s) --- PASS: kuttl/harness/examples-service-types (59.14s) --- PASS: kuttl/harness/examples-with-sampling (115.11s) --- PASS: kuttl/harness/examples-with-cassandra (55.09s) --- PASS: kuttl/harness/examples-with-badger-and-volume (38.26s) --- PASS: kuttl/harness/examples-with-badger (32.54s) --- PASS: kuttl/harness/examples-simplest (37.18s) --- PASS: kuttl/harness/examples-simple-prod-with-volumes (90.94s) --- PASS: kuttl/harness/examples-simple-prod (69.03s) --- PASS: kuttl/harness/examples-business-application-injected-sidecar (43.66s) --- PASS: kuttl/harness/examples-openshift-with-htpasswd (22.82s) --- PASS: kuttl/harness/examples-openshift-agent-as-daemonset (55.75s) --- PASS: kuttl/harness/examples-collector-with-priority-class (38.42s) --- PASS: kuttl/harness/examples-all-in-one-with-options (37.62s) --- PASS: kuttl/harness/examples-auto-provision-kafka (179.76s) --- PASS: kuttl/harness/examples-agent-with-priority-class (32.89s) --- PASS: kuttl/harness/examples-agent-as-daemonset (36.21s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml time="2023-09-25T07:20:12Z" level=debug msg="Setting a new name for the test suites" time="2023-09-25T07:20:12Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-09-25T07:20:12Z" level=debug msg="normalizing test case names" time="2023-09-25T07:20:12Z" level=debug msg="examples/artifacts -> examples_artifacts" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-with-badger-and-volume -> examples_examples_with_badger_and_volume" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes" time="2023-09-25T07:20:12Z" 
level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-openshift-agent-as-daemonset -> examples_examples_openshift_agent_as_daemonset" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-collector-with-priority-class -> examples_examples_collector_with_priority_class" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-auto-provision-kafka -> examples_examples_auto_provision_kafka" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class" time="2023-09-25T07:20:12Z" level=debug msg="examples/examples-agent-as-daemonset -> examples_examples_agent_as_daemonset" +---------------------------------------------------------+--------+ | NAME | RESULT | +---------------------------------------------------------+--------+ | examples_artifacts | passed | | examples_examples_service_types | passed | | examples_examples_with_sampling | passed | | examples_examples_with_cassandra | passed | | examples_examples_with_badger_and_volume | passed | | examples_examples_with_badger | passed | | examples_examples_simplest | passed | | examples_examples_simple_prod_with_volumes | passed | | examples_examples_simple_prod | passed | | examples_examples_business_application_injected_sidecar | passed | | examples_examples_openshift_with_htpasswd | passed | | examples_examples_openshift_agent_as_daemonset | passed | | examples_examples_collector_with_priority_class | passed | | examples_examples_all_in_one_with_options | passed | | examples_examples_auto_provision_kafka | passed | | examples_examples_agent_with_priority_class | passed | | examples_examples_agent_as_daemonset | passed | +---------------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 0 -gt 0 ']' + '[' 0 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true + '[' 3 -ne 3 ']' + test_suite_name=generate + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. 
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/generate.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-generate make[2]: Entering directory '/tmp/jaeger-tests' test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.27.0/operator-sdk_`go env GOOS`_`go env GOARCH` ./hack/install/install-golangci-lint.sh Installing golangci-lint golangci-lint 1.53.2 is installed already ./hack/install/install-goimports.sh Installing goimports Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12 >>>> Formatting code... ./.ci/format.sh >>>> Building... ./hack/install/install-dependencies.sh Installing go dependencies Try 0... go mod download GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.49.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2023-09-25T07:20:14Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.49.0"" -o "bin/jaeger-operator" main.go JAEGER_VERSION="1.49.0" ./tests/e2e/generate/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 39m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 39m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
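[Editor's note] The -X flags in the go build line above stamp the operator version and build date into string variables of the version package at link time, so no source file has to be generated per release. A self-contained sketch of the mechanism (the module path example/app is hypothetical; only the flag syntax matches the build above):

  # -X path.to.package.variable=value overrides a string variable when linking:
  mkdir -p /tmp/ldflags-demo && cd /tmp/ldflags-demo
  printf 'package main\n\nimport "fmt"\n\nvar version = "dev" // overridden at link time\n\nfunc main() { fmt.Println(version) }\n' > main.go
  go mod init example/app >/dev/null
  go build -ldflags "-X main.version=1.49.0" -o demo .
  ./demo   # prints 1.49.0 instead of dev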
JAEGER_VERSION="1.49.0" ./tests/e2e/generate/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 39m Cluster version is 4.14.0-0.nightly-2023-09-24-044110'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 39m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/generate/render.sh
++ export SUITE_DIR=./tests/e2e/generate
++ SUITE_DIR=./tests/e2e/generate
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/generate
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ '[' true = true ']'
+ skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed'
+ '[' 2 -ne 2 ']'
+ test_name=generate
+ message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/generate/_build
+ '[' _build '!=' _build ']'
+ rm -rf generate
+ warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m'
WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed
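[Editor's note] As the trace above shows, skip_test drops a rendered test from the kuttl suite simply by deleting its directory out of _build before kuttl runs: an argument-count check, a sanity check that the working directory is _build, an rm -rf of the test folder, then a warning. A reconstruction of the helper as implied by that trace (a sketch, not the repository's source):

  skip_test() {
    # Two arguments: the rendered test's directory name and the reason to print.
    [ $# -ne 2 ] && { echo "usage: skip_test <test_name> <message>" >&2; return 1; }
    local test_name=$1 message=$2
    # Only meaningful from inside the rendered _build directory.
    [ "$(basename "$(pwd)")" != "_build" ] && { echo "must run from _build" >&2; return 1; }
    rm -rf "$test_name"   # kuttl never sees the test once its folder is gone
    echo -e "\e[1;33mWAR: ${test_name}: ${message}\e[0m"
  }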
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running generate E2E tests'
Running generate E2E tests
+ cd tests/e2e/generate/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-2270117936
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 1 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT  kuttl/harness/artifacts
logger.go:42: 07:20:30 | artifacts | Creating namespace: kuttl-test-worthy-shad
logger.go:42: 07:20:30 | artifacts | artifacts events from ns kuttl-test-worthy-shad:
logger.go:42: 07:20:30 | artifacts | Deleting namespace: kuttl-test-worthy-shad
=== CONT  kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (5.81s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.77s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml
time="2023-09-25T07:20:36Z" level=debug msg="Setting a new name for the test suites"
time="2023-09-25T07:20:36Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-09-25T07:20:36Z" level=debug msg="normalizing test case names"
time="2023-09-25T07:20:36Z" level=debug msg="generate/artifacts -> generate_artifacts"
+--------------------+--------+
|        NAME        | RESULT |
+--------------------+--------+
| generate_artifacts | passed |
+--------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ '[' 0 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=miscellaneous
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/miscellaneous.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-miscellaneous
make[2]: Entering directory '/tmp/jaeger-tests'
SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 39m Cluster version is 4.14.0-0.nightly-2023-09-24-044110'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 39m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/miscellaneous/render.sh
++ export SUITE_DIR=./tests/e2e/miscellaneous
++ SUITE_DIR=./tests/e2e/miscellaneous
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
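[Editor's note] All of the variables exported above exist so that gomplate can substitute them while rendering the kuttl step files, as happens just below for kuttl-test.yaml. A minimal sketch of the mechanism using gomplate's .Env context (the template file and its content are invented for illustration; only the variable names come from the exports above):

  export ELASTICSEARCH_URL=http://elasticsearch ELASTICSEARCH_PORT=:9200
  printf 'es-server-urls: {{ .Env.ELASTICSEARCH_URL }}{{ .Env.ELASTICSEARCH_PORT }}\n' > es.yaml.template
  /tmp/jaeger-tests/bin/gomplate -f es.yaml.template -o es.yaml
  cat es.yaml   # -> es-server-urls: http://elasticsearch:9200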
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test cassandra-spark 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=cassandra-spark + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf cassandra-spark + warning 'cassandra-spark: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: cassandra-spark: Test not supported in OpenShift\e[0m' WAR: cassandra-spark: Test not supported in OpenShift + start_test collector-autoscale + '[' 1 -ne 1 ']' + test_name=collector-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m' Rendering files for test collector-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p collector-autoscale + cd collector-autoscale + jaeger_name=simple-prod + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + ELASTICSEARCH_NODECOUNT=1 + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml + kubectl api-versions + grep autoscaling/v2beta2 -q + rm ./04-assert.yaml + generate_otlp_e2e_tests http + test_protocol=http + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m' Rendering files for test collector-otlp-allinone-http + echo 
=========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale + '[' collector-autoscale '!=' _build ']' + cd .. + mkdir -p collector-otlp-allinone-http + cd collector-otlp-allinone-http + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger http true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m' Rendering files for test collector-otlp-production-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http + '[' collector-otlp-allinone-http '!=' _build ']' + cd .. 
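Note on the collector-autoscale rendering earlier in the trace: the script probes the cluster's HPA API (kubectl api-versions piped through grep autoscaling/v2beta2) and then deletes ./04-assert.yaml. autoscaling/v2beta2 was removed in Kubernetes 1.26, and this OpenShift 4.14 cluster no longer serves it, so the v2beta2-based assert cannot apply. A sketch of the guard the trace implies (a reconstruction, not the verbatim render.sh):

  # Keep the autoscaling/v2beta2-based assert only on clusters that still serve that API.
  if ! kubectl api-versions | grep -q autoscaling/v2beta2; then
    rm ./04-assert.yaml
  fi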
+ mkdir -p collector-otlp-production-http + cd collector-otlp-production-http + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger http true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + generate_otlp_e2e_tests grpc + test_protocol=grpc + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m' Rendering files for test collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http + '[' collector-otlp-production-http '!=' _build ']' + cd .. 
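The render_install_jaeger calls traced above dispatch on the deploy mode through a chain of string comparisons before rendering an install/assert pair with gomplate. Only the allInOne and production_autoprovisioned branches run in this log; a minimal sketch of the dispatch as the trace implies it (the other branches' template names do not appear here, so they are omitted):

  render_install_jaeger() {   # reconstructed from the trace, not the verbatim helper
    export JAEGER_NAME=$1
    deploy_mode=$2
    test_step=$3
    case $deploy_mode in
      allInOne)
        install_template=/tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template
        assert_template=/tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template
        ;;
      production_autoprovisioned)
        install_template=/tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template
        assert_template=/tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template
        ;;
    esac
    /tmp/jaeger-tests/bin/gomplate -f "$install_template" -o ./"$test_step"-install.yaml
    /tmp/jaeger-tests/bin/gomplate -f "$assert_template" -o ./"$test_step"-assert.yaml
  }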
+ mkdir -p collector-otlp-allinone-grpc + cd collector-otlp-allinone-grpc + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m' Rendering files for test collector-otlp-production-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc + '[' collector-otlp-allinone-grpc '!=' _build ']' + cd .. 
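render_otlp_smoke_test follows the same pattern in all four OTLP tests: pick https and :443 when the query endpoint is secured (it sits behind oauth-proxy on OpenShift), pick the OTLP port by protocol (4317 for gRPC, 4318 for HTTP), export the endpoints for gomplate, render the smoke-test step plus its assert, then clean up. A sketch inferred purely from the trace (the function source is not in the log, so the argument handling is an assumption):

  render_otlp_smoke_test() {   # reconstructed sketch
    jaeger=$1 reporting_protocol=$2 is_secured=$3 test_step=$4
    if [ "$is_secured" = true ]; then
      protocol=https:// query_port=:443
      template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template
    fi
    if [ "$reporting_protocol" = grpc ]; then reporting_port=:4317; else reporting_port=:4318; fi
    export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
    export OTEL_EXPORTER_OTLP_ENDPOINT=http://${jaeger}-collector-headless${reporting_port}
    export JAEGER_NAME=$jaeger
    REPORTING_PROTOCOL=$reporting_protocol /tmp/jaeger-tests/bin/gomplate -f "$template" -o ./"$test_step"-smoke-test.yaml
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./"$test_step"-assert.yaml
    unset JAEGER_NAME JAEGER_QUERY_ENDPOINT OTEL_EXPORTER_OTLP_ENDPOINT
  }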
+ mkdir -p collector-otlp-production-grpc + cd collector-otlp-production-grpc + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + '[' true = true ']' + skip_test istio 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=istio + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc + '[' collector-otlp-production-grpc '!=' _build ']' + cd .. 
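JAEGER_QUERY_ENDPOINT above is https://my-jaeger-query:443 because every query pod in this run carries an oauth-proxy sidecar, so the smoke tests authenticate with a token fetched by get-token.sh (visible later in the log). For illustration only, a manual probe of such an endpoint; the token command and the /api/services path are assumptions standing in for whatever get-token.sh and the check jobs actually do:

  # Hypothetical manual check against the secured query endpoint.
  TOKEN=$(oc create token e2e-test -n "$NAMESPACE")   # assumption: stand-in for get-token.sh
  curl -sk -H "Authorization: Bearer $TOKEN" https://my-jaeger-query:443/api/services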
+ rm -rf istio + warning 'istio: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m' WAR: istio: Test not supported in OpenShift + '[' true = true ']' + skip_test outside-cluster 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=outside-cluster + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf outside-cluster + warning 'outside-cluster: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m' WAR: outside-cluster: Test not supported in OpenShift + start_test set-custom-img + '[' 1 -ne 1 ']' + test_name=set-custom-img + echo =========================================================================== =========================================================================== + info 'Rendering files for test set-custom-img' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test set-custom-img\e[0m' Rendering files for test set-custom-img + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p set-custom-img + cd set-custom-img + jaeger_name=my-jaeger + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml + '[' true = true ']' + skip_test non-cluster-wide 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=non-cluster-wide + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img + '[' set-custom-img '!=' _build ']' + cd .. + rm -rf non-cluster-wide + warning 'non-cluster-wide: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m' WAR: non-cluster-wide: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running miscellaneous E2E tests' Running miscellaneous E2E tests + cd tests/e2e/miscellaneous/_build + set +e + KUBECONFIG=/tmp/kubeconfig-2270117936 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
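Each rendered suite then executes with kubectl-kuttl test --report xml from its _build directory, driven by the kuttl-test.yaml that gomplate rendered earlier. The rendered file itself is not shown in the log; a TestSuite consistent with the harness output below (420-second per-step timeout, results collected under ./artifacts) would look roughly like this, written here as a heredoc for illustration only:

  # Illustrative only: the real file is rendered from templates/kuttl-test.yaml.template.
  cat > kuttl-test.yaml <<'EOF'
  apiVersion: kuttl.dev/v1beta1
  kind: TestSuite
  testDirs:
    - .
  timeout: 420
  artifactsDir: ./artifacts
  EOF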
harness.go:275: Successful connection to cluster at: https://api.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 7 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 07:20:47 | artifacts | Creating namespace: kuttl-test-wired-stork logger.go:42: 07:20:47 | artifacts | artifacts events from ns kuttl-test-wired-stork: logger.go:42: 07:20:47 | artifacts | Deleting namespace: kuttl-test-wired-stork === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 07:20:53 | collector-otlp-production-grpc | Creating namespace: kuttl-test-pretty-hookworm logger.go:42: 07:20:53 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 07:20:53 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-pretty-hookworm/my-jaeger created logger.go:42: 07:21:28 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 07:21:28 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:21:28 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:21:29 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
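The 2-smoke-test step in progress here repeats a fixed pattern: gomplate renders otlp-smoke-test.yaml.template into a manifest with two Jobs (report-span pushes spans to the collector over OTLP; check-span queries them back through the secured query endpoint), the step creates the manifest, and the accompanying assert waits for completion. A rough manual equivalent of that wait, using the job names from the events below (kubectl wait is stock kubectl; the 420 s value mirrors the kuttl step timeout):

  kubectl create -f otlp-smoke-test-job.yaml -n "$NAMESPACE"
  kubectl wait --for=condition=complete job/report-span job/check-span -n "$NAMESPACE" --timeout=420s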
logger.go:42: 07:21:36 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:21:37 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:21:37 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 07:21:37 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 07:21:57 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:21:57 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-pretty-hookworm: logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9 Binding Scheduled Successfully assigned kuttl-test-pretty-hookworm/elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9 AddedInterface Add eth0 [10.128.2.75/23] from ovn-kubernetes logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal ReplicaSet.apps 
elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b954dcdc SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9 replicaset-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:20:59 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b954dcdc to 1 deployment-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:09 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:14 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestprettyhookwormmyjaeger-1-76b9542tpr9.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:25 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hqld8 Binding Scheduled Successfully assigned kuttl-test-pretty-hookworm/my-jaeger-collector-558ccfc8dd-hqld8 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hqld8 AddedInterface Add eth0 [10.131.0.76/23] from ovn-kubernetes logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hqld8.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hqld8.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-hqld8.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-hqld8 replicaset-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj Binding Scheduled Successfully assigned kuttl-test-pretty-hookworm/my-jaeger-query-7dfb9b7646-b8lgj to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj AddedInterface Add eth0 [10.131.0.77/23] from ovn-kubernetes logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already 
present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7dfb9b7646 SuccessfulCreate Created pod: my-jaeger-query-7dfb9b7646-b8lgj replicaset-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:26 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7dfb9b7646 to 1 deployment-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:31 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:31 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:31 +0000 UTC Normal Pod my-jaeger-query-7dfb9b7646-b8lgj.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:31 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7dfb9b7646 SuccessfulDelete Deleted pod: my-jaeger-query-7dfb9b7646-b8lgj replicaset-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:31 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-7dfb9b7646 to 0 from 1 deployment-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 
07:21:32 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9 Binding Scheduled Successfully assigned kuttl-test-pretty-hookworm/my-jaeger-query-f9d58c5cc-b4qs9 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:32 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9 AddedInterface Add eth0 [10.131.0.78/23] from ovn-kubernetes logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:32 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:32 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:32 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:32 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:32 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:32 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-f9d58c5cc SuccessfulCreate Created pod: my-jaeger-query-f9d58c5cc-b4qs9 replicaset-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:32 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-f9d58c5cc to 1 deployment-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:33 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:33 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:33 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:33 +0000 UTC Normal Pod my-jaeger-query-f9d58c5cc-b4qs9.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:37 +0000 UTC Normal Pod check-span-r95f9 Binding Scheduled Successfully assigned kuttl-test-pretty-hookworm/check-span-r95f9 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:37 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-r95f9 job-controller logger.go:42: 07:21:57 | 
collector-otlp-production-grpc | 2023-09-25 07:21:37 +0000 UTC Normal Pod report-span-k4vbs Binding Scheduled Successfully assigned kuttl-test-pretty-hookworm/report-span-k4vbs to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:37 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-k4vbs job-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:38 +0000 UTC Normal Pod check-span-r95f9 AddedInterface Add eth0 [10.129.2.63/23] from ovn-kubernetes logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:38 +0000 UTC Normal Pod check-span-r95f9.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:38 +0000 UTC Normal Pod check-span-r95f9.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:38 +0000 UTC Normal Pod check-span-r95f9.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:38 +0000 UTC Normal Pod report-span-k4vbs AddedInterface Add eth0 [10.129.2.62/23] from ovn-kubernetes logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:38 +0000 UTC Normal Pod report-span-k4vbs.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:38 +0000 UTC Normal Pod report-span-k4vbs.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:38 +0000 UTC Normal Pod report-span-k4vbs.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:21:57 | collector-otlp-production-grpc | 2023-09-25 07:21:57 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:21:57 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-pretty-hookworm === 
CONT kuttl/harness/set-custom-img logger.go:42: 07:22:09 | set-custom-img | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:22:09 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:22:09 | set-custom-img | Creating namespace: kuttl-test-rare-jennet logger.go:42: 07:22:09 | set-custom-img/1-install | starting test step 1-install logger.go:42: 07:22:09 | set-custom-img/1-install | Jaeger:kuttl-test-rare-jennet/my-jaeger created logger.go:42: 07:22:47 | set-custom-img/1-install | test step completed 1-install logger.go:42: 07:22:47 | set-custom-img/2-install | starting test step 2-install logger.go:42: 07:22:47 | set-custom-img/2-install | Jaeger:kuttl-test-rare-jennet/my-jaeger updated logger.go:42: 07:22:47 | set-custom-img/2-install | test step completed 2-install logger.go:42: 07:22:47 | set-custom-img/3-check-image | starting test step 3-check-image logger.go:42: 07:22:47 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh] logger.go:42: 07:22:47 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c logger.go:42: 07:22:52 | set-custom-img/3-check-image | Collector image asserted properly! logger.go:42: 07:22:52 | set-custom-img/3-check-image | test step completed 3-check-image logger.go:42: 07:22:52 | set-custom-img | set-custom-img events from ns kuttl-test-rare-jennet: logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:16 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestrarejennetmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccf to 1 deployment-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:17 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccf SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g replicaset-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g Binding Scheduled Successfully assigned kuttl-test-rare-jennet/elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g AddedInterface Add eth0 [10.128.2.76/23] from ovn-kubernetes logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:22:52 | set-custom-img | 
2023-09-25 07:22:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:27 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestrarejennetmyjaeger-1-665d679ccfqns4g.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:43 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-xtkbn Binding Scheduled Successfully assigned kuttl-test-rare-jennet/my-jaeger-collector-558ccfc8dd-xtkbn to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-xtkbn replicaset-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:43 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:43 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt Binding Scheduled Successfully assigned kuttl-test-rare-jennet/my-jaeger-query-59d949467b-mz2kt to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-59d949467b SuccessfulCreate Created pod: my-jaeger-query-59d949467b-mz2kt replicaset-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:43 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-59d949467b to 1 deployment-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-xtkbn AddedInterface Add eth0 [10.131.0.79/23] from ovn-kubernetes logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-xtkbn.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-xtkbn.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-xtkbn.spec.containers{jaeger-collector} Started Started container jaeger-collector 
kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt AddedInterface Add eth0 [10.131.0.80/23] from ovn-kubernetes logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:44 +0000 UTC Normal Pod my-jaeger-query-59d949467b-mz2kt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:48 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-xtkbn.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulDelete Deleted pod: my-jaeger-collector-558ccfc8dd-xtkbn replicaset-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:48 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-558ccfc8dd to 0 from 1 deployment-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:49 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-d78b6 Binding Scheduled Successfully assigned kuttl-test-rare-jennet/my-jaeger-collector-6755b759f8-d78b6 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:49 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-d78b6 AddedInterface Add eth0 [10.129.2.64/23] from ovn-kubernetes logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-6755b759f8 SuccessfulCreate Created 
pod: my-jaeger-collector-6755b759f8-d78b6 replicaset-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:49 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-6755b759f8 to 1 deployment-controller logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:50 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-d78b6.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:50 +0000 UTC Warning Pod my-jaeger-collector-6755b759f8-d78b6.spec.containers{jaeger-collector} Failed Failed to pull image "test": rpc error: code = Unknown desc = reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:50 +0000 UTC Warning Pod my-jaeger-collector-6755b759f8-d78b6.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:51 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-d78b6.spec.containers{jaeger-collector} BackOff Back-off pulling image "test" kubelet logger.go:42: 07:22:52 | set-custom-img | 2023-09-25 07:22:51 +0000 UTC Warning Pod my-jaeger-collector-6755b759f8-d78b6.spec.containers{jaeger-collector} Failed Error: ImagePullBackOff kubelet logger.go:42: 07:22:52 | set-custom-img | Deleting namespace: kuttl-test-rare-jennet === CONT kuttl/harness/collector-otlp-production-http logger.go:42: 07:22:58 | collector-otlp-production-http | Creating namespace: kuttl-test-destined-halibut logger.go:42: 07:22:58 | collector-otlp-production-http/1-install | starting test step 1-install logger.go:42: 07:22:58 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-destined-halibut/my-jaeger created logger.go:42: 07:23:34 | collector-otlp-production-http/1-install | test step completed 1-install logger.go:42: 07:23:34 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:23:34 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:23:36 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
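Back in set-custom-img/3-check-image, the "mismatch ... asserted properly" pair five seconds apart implies check-collector-img.sh polls until the collector Deployment's image field equals the expected value; the events above confirm that only the spec is asserted, since the pod pulling image "test" goes into ImagePullBackOff as expected. A plausible reconstruction (the script body is not in the log; the resource path and sleep interval are assumptions):

  # Poll the collector Deployment until its image matches the value set in step 2.
  EXPECTED=test
  while true; do
    HAS=$(kubectl get deployment my-jaeger-collector -n "$NAMESPACE" \
      -o jsonpath='{.spec.template.spec.containers[0].image}')
    if [ "$HAS" = "$EXPECTED" ]; then
      echo "Collector image asserted properly!"
      break
    fi
    echo "Collector image mismatch. Expected: $EXPECTED. Has: $HAS"
    sleep 5
  done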
logger.go:42: 07:23:42 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:23:43 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:23:43 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created logger.go:42: 07:23:43 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created logger.go:42: 07:23:54 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:23:54 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-destined-halibut: logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:04 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5b455cc SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69 replicaset-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69 Binding Scheduled Successfully assigned kuttl-test-destined-halibut/elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69 AddedInterface Add eth0 [10.128.2.77/23] from ovn-kubernetes logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:04 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5b455cc to 1 deployment-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 07:23:54 | 
collector-otlp-production-http | 2023-09-25 07:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:20 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestdestinedhalibutmyjaeger-1-74b5bw8f69.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:31 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-s9lvx Binding Scheduled Successfully assigned kuttl-test-destined-halibut/my-jaeger-collector-558ccfc8dd-s9lvx to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:31 +0000 UTC Warning Pod my-jaeger-collector-558ccfc8dd-s9lvx FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:31 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-s9lvx replicaset-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:31 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:31 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn Binding Scheduled Successfully assigned kuttl-test-destined-halibut/my-jaeger-query-6689789c78-wxzrn to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:31 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6689789c78 SuccessfulCreate Created pod: my-jaeger-query-6689789c78-wxzrn replicaset-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:31 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6689789c78 to 1 deployment-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-s9lvx AddedInterface Add eth0 [10.131.0.81/23] from ovn-kubernetes logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-s9lvx.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-s9lvx.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-s9lvx.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn 
AddedInterface Add eth0 [10.131.0.82/23] from ovn-kubernetes logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:32 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:39 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:39 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:39 +0000 UTC Normal Pod my-jaeger-query-6689789c78-wxzrn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:39 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6689789c78 SuccessfulDelete Deleted pod: my-jaeger-query-6689789c78-wxzrn replicaset-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:39 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-6689789c78 to 0 from 1 deployment-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:40 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb Binding 
Scheduled Successfully assigned kuttl-test-destined-halibut/my-jaeger-query-65646859d6-hsvmb to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:40 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-65646859d6 SuccessfulCreate Created pod: my-jaeger-query-65646859d6-hsvmb replicaset-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:40 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-65646859d6 to 1 deployment-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb AddedInterface Add eth0 [10.131.0.83/23] from ovn-kubernetes logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:41 +0000 UTC Normal Pod my-jaeger-query-65646859d6-hsvmb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:43 +0000 UTC Normal Pod check-span-zghr5 Binding Scheduled Successfully assigned kuttl-test-destined-halibut/check-span-zghr5 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:43 +0000 UTC Normal Pod check-span-zghr5 AddedInterface Add eth0 [10.129.2.66/23] from ovn-kubernetes logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:43 
+0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-zghr5 job-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:43 +0000 UTC Normal Pod report-span-2n977 Binding Scheduled Successfully assigned kuttl-test-destined-halibut/report-span-2n977 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:43 +0000 UTC Normal Pod report-span-2n977 AddedInterface Add eth0 [10.129.2.65/23] from ovn-kubernetes logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:43 +0000 UTC Normal Pod report-span-2n977.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:43 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2n977 job-controller logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:44 +0000 UTC Normal Pod check-span-zghr5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:44 +0000 UTC Normal Pod check-span-zghr5.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:44 +0000 UTC Normal Pod check-span-zghr5.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:44 +0000 UTC Normal Pod report-span-2n977.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:44 +0000 UTC Normal Pod report-span-2n977.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:23:54 | collector-otlp-production-http | 2023-09-25 07:23:54 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:23:54 | collector-otlp-production-http | Deleting namespace: kuttl-test-destined-halibut === CONT kuttl/harness/collector-otlp-allinone-grpc 
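For reference, the 2-smoke-test step of collector-otlp-production-http above is self-contained enough to replay by hand: gomplate renders a pair of Jobs from otlp-smoke-test.yaml.template, report-span pushes spans to the collector over OTLP/HTTP on port 4318, and check-span polls the query endpoint until the span is visible. A minimal sketch using only the commands visible in the trace above; the final kubectl wait is an assumption standing in for kuttl's own assert step, and $NAMESPACE is assumed to be set as kuttl sets it:

    REPORTING_PROTOCOL=http \
    OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 \
    JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 \
    MOUNT_SECRET=e2e-test \
      /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml
    # Creates both Jobs: report-span (sends spans) and check-span (asserts they became queryable)
    kubectl create -f otlp-smoke-test-job.yaml -n "$NAMESPACE"
    # Hypothetical stand-in for kuttl's assert file: wait for the checker Job to complete
    kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=120s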
logger.go:42: 07:24:06 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-cool-martin logger.go:42: 07:24:06 | collector-otlp-allinone-grpc/0-install | starting test step 0-install logger.go:42: 07:24:06 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-cool-martin/my-jaeger created logger.go:42: 07:24:12 | collector-otlp-allinone-grpc/0-install | test step completed 0-install logger.go:42: 07:24:12 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:24:12 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:24:13 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:24:19 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:24:20 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:24:20 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created logger.go:42: 07:24:20 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created logger.go:42: 07:24:39 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-cool-martin: logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2 Binding Scheduled Successfully assigned kuttl-test-cool-martin/my-jaeger-5f57cffdc6-prjb2 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2 AddedInterface Add eth0 [10.131.0.84/23] from ovn-kubernetes logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2.spec.containers{oauth-proxy} Pulled Container image 
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal ReplicaSet.apps my-jaeger-5f57cffdc6 SuccessfulCreate Created pod: my-jaeger-5f57cffdc6-prjb2 replicaset-controller logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:10 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-5f57cffdc6 to 1 deployment-controller logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:14 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-5f57cffdc6 to 0 from 1 deployment-controller logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:15 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:15 +0000 UTC Normal Pod my-jaeger-5f57cffdc6-prjb2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:15 +0000 UTC Normal ReplicaSet.apps my-jaeger-5f57cffdc6 SuccessfulDelete Deleted pod: my-jaeger-5f57cffdc6-prjb2 replicaset-controller logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:15 +0000 UTC Normal Pod my-jaeger-84d8cc64c4-bw6xk Binding Scheduled Successfully assigned kuttl-test-cool-martin/my-jaeger-84d8cc64c4-bw6xk to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:15 +0000 UTC Normal ReplicaSet.apps my-jaeger-84d8cc64c4 SuccessfulCreate Created pod: my-jaeger-84d8cc64c4-bw6xk replicaset-controller logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:15 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-84d8cc64c4 to 1 deployment-controller logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:16 +0000 UTC Normal Pod my-jaeger-84d8cc64c4-bw6xk AddedInterface Add eth0 [10.131.0.85/23] from ovn-kubernetes logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:16 +0000 UTC Normal Pod my-jaeger-84d8cc64c4-bw6xk.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:16 +0000 UTC Normal Pod my-jaeger-84d8cc64c4-bw6xk.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:16 +0000 UTC Normal Pod my-jaeger-84d8cc64c4-bw6xk.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:16 +0000 UTC Normal Pod my-jaeger-84d8cc64c4-bw6xk.spec.containers{oauth-proxy} Pulled Container image 
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:16 +0000 UTC Normal Pod my-jaeger-84d8cc64c4-bw6xk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:16 +0000 UTC Normal Pod my-jaeger-84d8cc64c4-bw6xk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:20 +0000 UTC Normal Pod check-span-drf7h Binding Scheduled Successfully assigned kuttl-test-cool-martin/check-span-drf7h to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:20 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-drf7h job-controller logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:20 +0000 UTC Normal Pod report-span-7r257 Binding Scheduled Successfully assigned kuttl-test-cool-martin/report-span-7r257 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:20 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-7r257 job-controller logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:21 +0000 UTC Normal Pod check-span-drf7h AddedInterface Add eth0 [10.129.2.67/23] from ovn-kubernetes logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:21 +0000 UTC Normal Pod check-span-drf7h.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:21 +0000 UTC Normal Pod check-span-drf7h.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:21 +0000 UTC Normal Pod check-span-drf7h.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:21 +0000 UTC Normal Pod report-span-7r257 AddedInterface Add eth0 [10.128.2.78/23] from ovn-kubernetes logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:21 +0000 UTC Normal Pod report-span-7r257.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:21 +0000 UTC Normal Pod report-span-7r257.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:21 +0000 UTC Normal Pod report-span-7r257.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | 2023-09-25 07:24:39 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:24:39 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-cool-martin === CONT kuttl/harness/collector-otlp-allinone-http logger.go:42: 07:24:51 | 
collector-otlp-allinone-http | Creating namespace: kuttl-test-engaged-fowl logger.go:42: 07:24:51 | collector-otlp-allinone-http/0-install | starting test step 0-install logger.go:42: 07:24:52 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-engaged-fowl/my-jaeger created logger.go:42: 07:24:58 | collector-otlp-allinone-http/0-install | test step completed 0-install logger.go:42: 07:24:58 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:24:58 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:24:59 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:25:05 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:25:05 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:25:06 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created logger.go:42: 07:25:06 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created logger.go:42: 07:25:18 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:25:18 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-engaged-fowl: logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:55 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s Binding Scheduled Successfully assigned kuttl-test-engaged-fowl/my-jaeger-6476f94-8zq7s to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:55 +0000 UTC Warning Pod my-jaeger-6476f94-8zq7s FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : secret "my-jaeger-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:55 +0000 UTC Normal ReplicaSet.apps my-jaeger-6476f94 SuccessfulCreate Created pod: my-jaeger-6476f94-8zq7s replicaset-controller logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:55 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6476f94 to 1 deployment-controller logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:56 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s AddedInterface Add eth0 [10.128.2.79/23] from ovn-kubernetes logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:56 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s.spec.containers{jaeger} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:56 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:56 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:56 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:56 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:24:56 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:02 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:02 +0000 UTC Normal Pod my-jaeger-6476f94-8zq7s.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:02 +0000 UTC Normal ReplicaSet.apps my-jaeger-6476f94 SuccessfulDelete Deleted pod: my-jaeger-6476f94-8zq7s replicaset-controller logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:02 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-6476f94 to 0 from 1 deployment-controller logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:03 +0000 UTC Normal Pod my-jaeger-5f4bc7b4c9-spkdp Binding Scheduled Successfully assigned kuttl-test-engaged-fowl/my-jaeger-5f4bc7b4c9-spkdp to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:03 +0000 UTC Normal Pod my-jaeger-5f4bc7b4c9-spkdp AddedInterface Add eth0 [10.128.2.80/23] from ovn-kubernetes logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:03 +0000 UTC Normal Pod my-jaeger-5f4bc7b4c9-spkdp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:03 +0000 UTC Normal Pod my-jaeger-5f4bc7b4c9-spkdp.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:03 +0000 UTC Normal Pod my-jaeger-5f4bc7b4c9-spkdp.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:03 +0000 UTC Normal Pod my-jaeger-5f4bc7b4c9-spkdp.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 
07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:03 +0000 UTC Normal ReplicaSet.apps my-jaeger-5f4bc7b4c9 SuccessfulCreate Created pod: my-jaeger-5f4bc7b4c9-spkdp replicaset-controller logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:03 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-5f4bc7b4c9 to 1 deployment-controller logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:04 +0000 UTC Normal Pod my-jaeger-5f4bc7b4c9-spkdp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:04 +0000 UTC Normal Pod my-jaeger-5f4bc7b4c9-spkdp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod check-span-djbrb Binding Scheduled Successfully assigned kuttl-test-engaged-fowl/check-span-djbrb to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod check-span-djbrb AddedInterface Add eth0 [10.129.2.68/23] from ovn-kubernetes logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod check-span-djbrb.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod check-span-djbrb.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod check-span-djbrb.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-djbrb job-controller logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod report-span-7wj8q Binding Scheduled Successfully assigned kuttl-test-engaged-fowl/report-span-7wj8q to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod report-span-7wj8q AddedInterface Add eth0 [10.131.0.86/23] from ovn-kubernetes logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod report-span-7wj8q.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod report-span-7wj8q.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Pod report-span-7wj8q.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:06 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-7wj8q job-controller logger.go:42: 07:25:18 | collector-otlp-allinone-http | 2023-09-25 07:25:17 +0000 UTC Normal Job.batch 
check-span Completed Job completed job-controller logger.go:42: 07:25:18 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-engaged-fowl === CONT kuttl/harness/collector-autoscale logger.go:42: 07:25:25 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:25:25 | collector-autoscale | Ignoring wait-for-hpa.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:25:25 | collector-autoscale | Creating namespace: kuttl-test-liked-stallion logger.go:42: 07:25:25 | collector-autoscale/1-install | starting test step 1-install logger.go:42: 07:25:25 | collector-autoscale/1-install | Jaeger:kuttl-test-liked-stallion/simple-prod created logger.go:42: 07:26:02 | collector-autoscale/1-install | test step completed 1-install logger.go:42: 07:26:02 | collector-autoscale/2-wait-for-hpa | starting test step 2-wait-for-hpa logger.go:42: 07:26:02 | collector-autoscale/2-wait-for-hpa | running command: [sh -c ./wait-for-hpa.sh] logger.go:42: 07:26:02 | collector-autoscale/2-wait-for-hpa | Some HPA metrics are not known yet logger.go:42: 07:26:03 | collector-autoscale/2-wait-for-hpa | test step completed 2-wait-for-hpa logger.go:42: 07:26:03 | collector-autoscale/3- | starting test step 3- logger.go:42: 07:26:03 | collector-autoscale/3- | test step completed 3- logger.go:42: 07:26:03 | collector-autoscale | collector-autoscale events from ns kuttl-test-liked-stallion: logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:32 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47ffbb85 to 1 deployment-controller logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm Binding Scheduled Successfully assigned kuttl-test-liked-stallion/elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm AddedInterface Add eth0 [10.129.2.69/23] from ovn-kubernetes logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 07:26:03 | 
collector-autoscale | 2023-09-25 07:25:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:33 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47ffbb85 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm replicaset-controller logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:43 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:48 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestlikedstallionsimpleprod-1-5d47f5frgm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:59 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-m25vm Binding Scheduled Successfully assigned kuttl-test-liked-stallion/simple-prod-collector-b86d94b64-m25vm to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:59 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-b86d94b64 SuccessfulCreate Created pod: simple-prod-collector-b86d94b64-m25vm replicaset-controller logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:59 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-b86d94b64 to 1 deployment-controller logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:59 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7 Binding Scheduled Successfully assigned kuttl-test-liked-stallion/simple-prod-query-64c7df454f-kr7j7 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:59 +0000 UTC Normal ReplicaSet.apps simple-prod-query-64c7df454f SuccessfulCreate Created pod: simple-prod-query-64c7df454f-kr7j7 replicaset-controller logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:25:59 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-64c7df454f to 1 deployment-controller logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:00 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-m25vm AddedInterface Add eth0 [10.128.2.81/23] from ovn-kubernetes logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:00 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-m25vm.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:00 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-m25vm.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:00 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-m25vm.spec.containers{jaeger-collector} Started 
Started container jaeger-collector kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:00 +0000 UTC Warning Pod simple-prod-query-64c7df454f-kr7j7 FailedMount MountVolume.SetUp failed for volume "simple-prod-ui-oauth-proxy-tls" : secret "simple-prod-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:00 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7 AddedInterface Add eth0 [10.128.2.82/23] from ovn-kubernetes
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:00 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:01 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:01 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:01 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:01 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:01 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:01 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:01 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:26:03 | collector-autoscale | 2023-09-25 07:26:01 +0000 UTC Normal Pod simple-prod-query-64c7df454f-kr7j7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:26:03 | collector-autoscale | Deleting namespace: kuttl-test-liked-stallion
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (323.63s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.74s)
        --- PASS: kuttl/harness/collector-otlp-production-grpc (76.69s)
        --- PASS: kuttl/harness/set-custom-img (48.98s)
        --- PASS: kuttl/harness/collector-otlp-production-http (67.74s)
        --- PASS: kuttl/harness/collector-otlp-allinone-grpc (45.46s)
        --- PASS: kuttl/harness/collector-otlp-allinone-http (33.33s)
        --- PASS: kuttl/harness/collector-autoscale (45.65s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml
time="2023-09-25T07:26:11Z" level=debug msg="Setting a new name for the test suites"
time="2023-09-25T07:26:11Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-09-25T07:26:11Z" level=debug msg="normalizing test case names"
time="2023-09-25T07:26:11Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts"
time="2023-09-25T07:26:11Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc"
time="2023-09-25T07:26:11Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img"
time="2023-09-25T07:26:11Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http"
time="2023-09-25T07:26:11Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc"
time="2023-09-25T07:26:11Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http"
time="2023-09-25T07:26:11Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale"
+----------------------------------------------+--------+
|                     NAME                     | RESULT |
+----------------------------------------------+--------+
| miscellaneous_artifacts                      | passed |
| miscellaneous_collector_otlp_production_grpc | passed |
| miscellaneous_set_custom_img                 | passed |
| miscellaneous_collector_otlp_production_http | passed |
| miscellaneous_collector_otlp_allinone_grpc   | passed |
| miscellaneous_collector_otlp_allinone_http   | passed |
| miscellaneous_collector_autoscale            | passed |
+----------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ '[' 0 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=sidecar
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/sidecar.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-sidecar
make[2]: Entering directory '/tmp/jaeger-tests'
./tests/e2e/sidecar/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 45m Cluster version is 4.14.0-0.nightly-2023-09-24-044110'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 45m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/sidecar/render.sh ++ export SUITE_DIR=./tests/e2e/sidecar ++ SUITE_DIR=./tests/e2e/sidecar ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/sidecar ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + jaeger_service_name=order + start_test sidecar-deployment + '[' 1 -ne 1 ']' + test_name=sidecar-deployment + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-deployment' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m' Rendering files for test sidecar-deployment + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build + '[' _build '!=' _build ']' + mkdir -p sidecar-deployment + cd sidecar-deployment + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-namespace + '[' 1 -ne 1 ']' + test_name=sidecar-namespace + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-namespace' + '[' 1 -ne 1 ']' + 
echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m' Rendering files for test sidecar-namespace + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment + '[' sidecar-deployment '!=' _build ']' + cd .. + mkdir -p sidecar-namespace + cd sidecar-namespace + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-skip-webhook + '[' 1 -ne 1 ']' + test_name=sidecar-skip-webhook + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-skip-webhook' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m' Rendering files for test sidecar-skip-webhook + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace + '[' sidecar-namespace '!=' _build ']' + cd .. 
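Each render_find_service call in the trace above follows the same pattern: export the per-step variables, render the find-service Job and its assert file from the shared templates, and unset everything so the next step starts clean. Condensed into a standalone sketch, with the values taken from the 06-find-service step just rendered (the query endpoint is the allInOne form, agent-as-sidecar2-query on 16686); the inline comments are interpretation, not part of the original script:

    export JAEGER_NAME=agent-as-sidecar2   # Jaeger instance the generated Job queries
    export JOB_NUMBER=01                   # prefix of the generated Job name (01-find-service)
    export SERVICE_NAME=order              # service expected to appear in the query API
    export JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml
    unset JAEGER_NAME SERVICE_NAME JOB_NUMBER JAEGER_QUERY_ENDPOINT   # no state leaks into the next step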
+ mkdir -p sidecar-skip-webhook
+ cd sidecar-skip-webhook
+ render_install_vertx 01
+ '[' 1 -ne 1 ']'
+ test_step=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running sidecar E2E tests'
Running sidecar E2E tests
+ cd tests/e2e/sidecar/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-2270117936
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 4 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/sidecar-deployment
=== PAUSE kuttl/harness/sidecar-deployment
=== RUN kuttl/harness/sidecar-namespace
=== PAUSE kuttl/harness/sidecar-namespace
=== RUN kuttl/harness/sidecar-skip-webhook
=== PAUSE kuttl/harness/sidecar-skip-webhook
=== CONT kuttl/harness/artifacts
logger.go:42: 07:26:18 | artifacts | Creating namespace: kuttl-test-special-doberman
logger.go:42: 07:26:18 | artifacts | artifacts events from ns kuttl-test-special-doberman:
logger.go:42: 07:26:18 | artifacts | Deleting namespace: kuttl-test-special-doberman
=== CONT kuttl/harness/sidecar-namespace
logger.go:42: 07:26:24 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:26:24 | sidecar-namespace | Creating namespace: kuttl-test-emerging-goblin
logger.go:42: 07:26:24 | sidecar-namespace/0-install | starting test step 0-install
logger.go:42: 07:26:24 | sidecar-namespace/0-install | Jaeger:kuttl-test-emerging-goblin/agent-as-sidecar created
logger.go:42: 07:26:30 | sidecar-namespace/0-install | test step completed 0-install
logger.go:42: 07:26:30 | sidecar-namespace/1-install | starting test step 1-install
logger.go:42: 07:26:30 | sidecar-namespace/1-install | Deployment:kuttl-test-emerging-goblin/vertx-create-span-sidecar created
logger.go:42: 07:26:32 | sidecar-namespace/1-install | test step completed 1-install
logger.go:42: 07:26:32 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection
logger.go:42: 07:26:32 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"]
logger.go:42: 07:26:32 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-emerging-goblin annotated
logger.go:42: 07:26:33 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection
logger.go:42: 07:26:33 | sidecar-namespace/3-find-service | starting test step 3-find-service
logger.go:42: 07:26:33 | sidecar-namespace/3-find-service | Job:kuttl-test-emerging-goblin/00-find-service created
logger.go:42: 07:26:45 | sidecar-namespace/3-find-service | test step completed 3-find-service
logger.go:42: 07:26:45 | sidecar-namespace/4-other-instance | starting test step 4-other-instance
logger.go:42: 07:26:45 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-emerging-goblin/agent-as-sidecar2 created
logger.go:42: 07:26:54 | sidecar-namespace/4-other-instance | test
step completed 4-other-instance logger.go:42: 07:26:54 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 07:26:55 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 07:26:55 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 07:26:56 | sidecar-namespace/6-find-service | Job:kuttl-test-emerging-goblin/01-find-service created logger.go:42: 07:27:15 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 07:27:15 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 07:27:15 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 07:27:15 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-emerging-goblin annotated logger.go:42: 07:27:20 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 07:27:20 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-emerging-goblin: logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:28 +0000 UTC Normal Pod agent-as-sidecar-5dfcff9fc6-php9v Binding Scheduled Successfully assigned kuttl-test-emerging-goblin/agent-as-sidecar-5dfcff9fc6-php9v to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:28 +0000 UTC Normal Pod agent-as-sidecar-5dfcff9fc6-php9v AddedInterface Add eth0 [10.128.2.83/23] from ovn-kubernetes logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:28 +0000 UTC Normal Pod agent-as-sidecar-5dfcff9fc6-php9v.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:28 +0000 UTC Normal Pod agent-as-sidecar-5dfcff9fc6-php9v.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:28 +0000 UTC Normal Pod agent-as-sidecar-5dfcff9fc6-php9v.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:28 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-5dfcff9fc6 SuccessfulCreate Created pod: agent-as-sidecar-5dfcff9fc6-php9v replicaset-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:28 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-5dfcff9fc6 to 1 deployment-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:30 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-w648n Binding Scheduled Successfully assigned kuttl-test-emerging-goblin/vertx-create-span-sidecar-568b7c9f6f-w648n to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:30 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulCreate Created pod: vertx-create-span-sidecar-568b7c9f6f-w648n replicaset-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:30 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-568b7c9f6f to 1 deployment-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:31 
+0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-w648n AddedInterface Add eth0 [10.131.0.87/23] from ovn-kubernetes logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:31 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-w648n.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:31 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-w648n.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:31 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-w648n.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:32 +0000 UTC Normal Pod vertx-create-span-sidecar-5b94df59dc-jzhbb Binding Scheduled Successfully assigned kuttl-test-emerging-goblin/vertx-create-span-sidecar-5b94df59dc-jzhbb to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:32 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5b94df59dc SuccessfulCreate Created pod: vertx-create-span-sidecar-5b94df59dc-jzhbb replicaset-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:32 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-5b94df59dc to 1 deployment-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:33 +0000 UTC Normal Pod 00-find-service-8qptr Binding Scheduled Successfully assigned kuttl-test-emerging-goblin/00-find-service-8qptr to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:33 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-8qptr job-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5b94df59dc-jzhbb AddedInterface Add eth0 [10.128.2.84/23] from ovn-kubernetes logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:27:20 | 
sidecar-namespace | 2023-09-25 07:26:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:34 +0000 UTC Normal Pod 00-find-service-8qptr AddedInterface Add eth0 [10.131.0.88/23] from ovn-kubernetes logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:34 +0000 UTC Normal Pod 00-find-service-8qptr.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:34 +0000 UTC Normal Pod 00-find-service-8qptr.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:34 +0000 UTC Normal Pod 00-find-service-8qptr.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:40 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-w648n.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.87:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:40 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-w648n.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.87:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:41 +0000 UTC Warning Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.84:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:41 +0000 UTC Warning Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.84:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:42 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-w648n.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:42 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-w648n.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.87:8080/": read tcp 10.131.0.2:58874->10.131.0.87:8080: read: connection reset by peer kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:42 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-w648n.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.87:8080/": dial tcp 10.131.0.87:8080: connect: connection refused kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:43 +0000 UTC Normal Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:27:20 | 
sidecar-namespace | 2023-09-25 07:26:43 +0000 UTC Warning Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.84:8080/": read tcp 10.128.2.2:40522->10.128.2.84:8080: read: connection reset by peer kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:43 +0000 UTC Warning Pod vertx-create-span-sidecar-5b94df59dc-jzhbb.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.84:8080/": dial tcp 10.128.2.84:8080: connect: connection refused kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:45 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:51 +0000 UTC Normal Pod agent-as-sidecar2-6fc97b8954-vxjqs Binding Scheduled Successfully assigned kuttl-test-emerging-goblin/agent-as-sidecar2-6fc97b8954-vxjqs to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:51 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-6fc97b8954 SuccessfulCreate Created pod: agent-as-sidecar2-6fc97b8954-vxjqs replicaset-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:51 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-6fc97b8954 to 1 deployment-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:52 +0000 UTC Warning Pod agent-as-sidecar2-6fc97b8954-vxjqs FailedMount MountVolume.SetUp failed for volume "agent-as-sidecar2-collector-tls-config-volume" : secret "agent-as-sidecar2-collector-headless-tls" not found kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:52 +0000 UTC Normal Pod agent-as-sidecar2-6fc97b8954-vxjqs AddedInterface Add eth0 [10.131.0.89/23] from ovn-kubernetes logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:52 +0000 UTC Normal Pod agent-as-sidecar2-6fc97b8954-vxjqs.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:52 +0000 UTC Normal Pod agent-as-sidecar2-6fc97b8954-vxjqs.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:53 +0000 UTC Normal Pod agent-as-sidecar2-6fc97b8954-vxjqs.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:53 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-w648n.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.87:8080/": read tcp 10.131.0.2:38196->10.131.0.87:8080: read: connection reset by peer kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:55 +0000 UTC Normal Pod agent-as-sidecar-5dfcff9fc6-php9v.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:56 +0000 UTC Normal Pod 01-find-service-x78p6 Binding Scheduled Successfully assigned kuttl-test-emerging-goblin/01-find-service-x78p6 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:56 +0000 UTC Normal Pod 01-find-service-x78p6 AddedInterface Add eth0 [10.129.2.70/23] from ovn-kubernetes logger.go:42: 
07:27:20 | sidecar-namespace | 2023-09-25 07:26:56 +0000 UTC Normal Pod 01-find-service-x78p6.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:56 +0000 UTC Normal Pod 01-find-service-x78p6.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:56 +0000 UTC Normal Pod 01-find-service-x78p6.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:56 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-x78p6 job-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:58 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-568b7c9f6f-w648n replicaset-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:58 +0000 UTC Normal Pod vertx-create-span-sidecar-75759ff889-7xrks Binding Scheduled Successfully assigned kuttl-test-emerging-goblin/vertx-create-span-sidecar-75759ff889-7xrks to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:58 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-75759ff889 SuccessfulCreate Created pod: vertx-create-span-sidecar-75759ff889-7xrks replicaset-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:58 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-568b7c9f6f to 0 from 1 deployment-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:58 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-75759ff889 to 1 from 0 deployment-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:59 +0000 UTC Normal Pod vertx-create-span-sidecar-75759ff889-7xrks AddedInterface Add eth0 [10.131.0.90/23] from ovn-kubernetes logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:59 +0000 UTC Normal Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:59 +0000 UTC Normal Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:59 +0000 UTC Normal Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:59 +0000 UTC Normal Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:59 +0000 UTC Normal Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{jaeger-agent} Created Created 
container jaeger-agent kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:26:59 +0000 UTC Normal Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:08 +0000 UTC Warning Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.90:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:08 +0000 UTC Warning Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.90:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:10 +0000 UTC Normal Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:10 +0000 UTC Warning Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.90:8080/": read tcp 10.131.0.2:57382->10.131.0.90:8080: read: connection reset by peer kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:10 +0000 UTC Warning Pod vertx-create-span-sidecar-75759ff889-7xrks.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.90:8080/": dial tcp 10.131.0.90:8080: connect: connection refused kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:15 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:15 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5b94df59dc SuccessfulDelete Deleted pod: vertx-create-span-sidecar-5b94df59dc-jzhbb replicaset-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:15 +0000 UTC Normal Pod vertx-create-span-sidecar-6bf76ff7b4-84fkr Binding Scheduled Successfully assigned kuttl-test-emerging-goblin/vertx-create-span-sidecar-6bf76ff7b4-84fkr to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:15 +0000 UTC Normal Pod vertx-create-span-sidecar-6bf76ff7b4-84fkr AddedInterface Add eth0 [10.129.2.71/23] from ovn-kubernetes logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:15 +0000 UTC Normal Pod vertx-create-span-sidecar-6bf76ff7b4-84fkr.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:15 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6bf76ff7b4 SuccessfulCreate Created pod: vertx-create-span-sidecar-6bf76ff7b4-84fkr replicaset-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:15 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-5b94df59dc to 0 from 1 deployment-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:15 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6bf76ff7b4 to 1 
from 0 deployment-controller logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:19 +0000 UTC Normal Pod vertx-create-span-sidecar-6bf76ff7b4-84fkr.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.352306381s (3.352326052s including waiting) kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:19 +0000 UTC Normal Pod vertx-create-span-sidecar-6bf76ff7b4-84fkr.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:20 | sidecar-namespace | 2023-09-25 07:27:19 +0000 UTC Normal Pod vertx-create-span-sidecar-6bf76ff7b4-84fkr.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:20 | sidecar-namespace | Deleting namespace: kuttl-test-emerging-goblin === CONT kuttl/harness/sidecar-skip-webhook logger.go:42: 07:27:26 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:27:26 | sidecar-skip-webhook | Creating namespace: kuttl-test-tender-buck logger.go:42: 07:27:26 | sidecar-skip-webhook/0-install | starting test step 0-install logger.go:42: 07:27:26 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-tender-buck/agent-as-sidecar created logger.go:42: 07:27:32 | sidecar-skip-webhook/0-install | test step completed 0-install logger.go:42: 07:27:32 | sidecar-skip-webhook/1-install | starting test step 1-install logger.go:42: 07:27:32 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-tender-buck/vertx-create-span-sidecar created logger.go:42: 07:27:34 | sidecar-skip-webhook/1-install | test step completed 1-install logger.go:42: 07:27:34 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label logger.go:42: 07:27:34 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-tender-buck] logger.go:42: 07:27:34 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled logger.go:42: 07:27:34 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-tender-buck] logger.go:42: 07:27:35 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 07:27:35 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label logger.go:42: 07:27:35 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label logger.go:42: 07:27:35 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-tender-buck] logger.go:42: 07:27:35 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled logger.go:42: 07:27:37 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label logger.go:42: 07:27:37 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-tender-buck: logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:30 +0000 UTC Normal Pod agent-as-sidecar-76d9575d4d-phb5l Binding Scheduled Successfully assigned kuttl-test-tender-buck/agent-as-sidecar-76d9575d4d-phb5l to 
ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:30 +0000 UTC Normal Pod agent-as-sidecar-76d9575d4d-phb5l AddedInterface Add eth0 [10.128.2.85/23] from ovn-kubernetes logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:30 +0000 UTC Normal Pod agent-as-sidecar-76d9575d4d-phb5l.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:30 +0000 UTC Normal Pod agent-as-sidecar-76d9575d4d-phb5l.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:30 +0000 UTC Normal Pod agent-as-sidecar-76d9575d4d-phb5l.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:30 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-76d9575d4d SuccessfulCreate Created pod: agent-as-sidecar-76d9575d4d-phb5l replicaset-controller logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:30 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-76d9575d4d to 1 deployment-controller logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:32 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-mf8fv Binding Scheduled Successfully assigned kuttl-test-tender-buck/vertx-create-span-sidecar-568b7c9f6f-mf8fv to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:32 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulCreate Created pod: vertx-create-span-sidecar-568b7c9f6f-mf8fv replicaset-controller logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:32 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-568b7c9f6f to 1 deployment-controller logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:33 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-mf8fv AddedInterface Add eth0 [10.131.0.91/23] from ovn-kubernetes logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:33 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-mf8fv.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:33 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-mf8fv.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:33 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-mf8fv.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-9dcb658f9-2xp9h Binding Scheduled Successfully assigned kuttl-test-tender-buck/vertx-create-span-sidecar-9dcb658f9-2xp9h to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-9dcb658f9-2xp9h AddedInterface Add eth0 [10.128.2.86/23] from ovn-kubernetes logger.go:42: 
07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-9dcb658f9-2xp9h.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-9dcb658f9-2xp9h.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-9dcb658f9-2xp9h.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-9dcb658f9-2xp9h.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-9dcb658f9-2xp9h.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-9dcb658f9-2xp9h.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-9dcb658f9 SuccessfulCreate Created pod: vertx-create-span-sidecar-9dcb658f9-2xp9h replicaset-controller logger.go:42: 07:27:37 | sidecar-skip-webhook | 2023-09-25 07:27:35 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-9dcb658f9 to 1 deployment-controller logger.go:42: 07:27:37 | sidecar-skip-webhook | Deleting namespace: kuttl-test-tender-buck === CONT kuttl/harness/sidecar-deployment logger.go:42: 07:27:43 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:27:43 | sidecar-deployment | Creating namespace: kuttl-test-electric-grackle logger.go:42: 07:27:43 | sidecar-deployment/0-install | starting test step 0-install logger.go:42: 07:27:43 | sidecar-deployment/0-install | Jaeger:kuttl-test-electric-grackle/agent-as-sidecar created logger.go:42: 07:27:49 | sidecar-deployment/0-install | test step completed 0-install logger.go:42: 07:27:49 | sidecar-deployment/1-install | starting test step 1-install logger.go:42: 07:27:49 | sidecar-deployment/1-install | Deployment:kuttl-test-electric-grackle/vertx-create-span-sidecar created logger.go:42: 07:27:50 | sidecar-deployment/1-install | test step completed 1-install logger.go:42: 07:27:50 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection logger.go:42: 07:27:50 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-electric-grackle] logger.go:42: 07:27:50 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 07:27:52 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection logger.go:42: 07:27:52 | sidecar-deployment/3-find-service | starting test step 3-find-service 
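Everything the three sidecar suites assert is driven by plain kubectl labels and annotations, as the step commands above show. Condensed from those logged commands, with $NAMESPACE standing in for the generated kuttl-test-* namespace of each run:

  # Enable injection for every deployment in a namespace (sidecar-namespace, step 2):
  kubectl annotate --overwrite namespaces "$NAMESPACE" sidecar.jaegertracing.io/inject=true

  # Enable, then later disable, injection on a single deployment (sidecar-deployment, steps 2 and 7):
  kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace "$NAMESPACE"
  kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace "$NAMESPACE"

  # The webhook skips anything labeled as the operator itself; sidecar-skip-webhook
  # adds and then removes that label (steps 2 and 3):
  kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace "$NAMESPACE"
  kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace "$NAMESPACE"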
logger.go:42: 07:27:52 | sidecar-deployment/3-find-service | Job:kuttl-test-electric-grackle/00-find-service created logger.go:42: 07:28:03 | sidecar-deployment/3-find-service | test step completed 3-find-service logger.go:42: 07:28:03 | sidecar-deployment/4-other-instance | starting test step 4-other-instance logger.go:42: 07:28:03 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-electric-grackle/agent-as-sidecar2 created logger.go:42: 07:28:09 | sidecar-deployment/4-other-instance | test step completed 4-other-instance logger.go:42: 07:28:09 | sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 07:28:10 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 07:28:10 | sidecar-deployment/6-find-service | starting test step 6-find-service logger.go:42: 07:28:10 | sidecar-deployment/6-find-service | Job:kuttl-test-electric-grackle/01-find-service created logger.go:42: 07:28:30 | sidecar-deployment/6-find-service | test step completed 6-find-service logger.go:42: 07:28:30 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection logger.go:42: 07:28:30 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-electric-grackle] logger.go:42: 07:28:30 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 07:28:32 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection logger.go:42: 07:28:32 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-electric-grackle: logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:46 +0000 UTC Normal Pod agent-as-sidecar-5dd755c95c-528dv Binding Scheduled Successfully assigned kuttl-test-electric-grackle/agent-as-sidecar-5dd755c95c-528dv to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:46 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-5dd755c95c SuccessfulCreate Created pod: agent-as-sidecar-5dd755c95c-528dv replicaset-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:46 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-5dd755c95c to 1 deployment-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:47 +0000 UTC Normal Pod agent-as-sidecar-5dd755c95c-528dv AddedInterface Add eth0 [10.128.2.87/23] from ovn-kubernetes logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:47 +0000 UTC Normal Pod agent-as-sidecar-5dd755c95c-528dv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:47 +0000 UTC Normal Pod agent-as-sidecar-5dd755c95c-528dv.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:47 +0000 UTC Normal Pod agent-as-sidecar-5dd755c95c-528dv.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:49 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-v5xng Binding Scheduled Successfully assigned kuttl-test-electric-grackle/vertx-create-span-sidecar-568b7c9f6f-v5xng 
to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:49 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-v5xng AddedInterface Add eth0 [10.131.0.92/23] from ovn-kubernetes logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:49 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-v5xng.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:49 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-v5xng.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:49 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-v5xng.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:49 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulCreate Created pod: vertx-create-span-sidecar-568b7c9f6f-v5xng replicaset-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:49 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-568b7c9f6f to 1 deployment-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:50 +0000 UTC Normal Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl Binding Scheduled Successfully assigned kuttl-test-electric-grackle/vertx-create-span-sidecar-6d75ff8d46-h9fxl to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:50 +0000 UTC Warning Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl FailedMount MountVolume.SetUp failed for volume "agent-as-sidecar-service-ca" : configmap references non-existent config key: service-ca.crt kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:50 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6d75ff8d46 SuccessfulCreate Created pod: vertx-create-span-sidecar-6d75ff8d46-h9fxl replicaset-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:50 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6d75ff8d46 to 1 deployment-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl AddedInterface Add eth0 [10.128.2.88/23] from ovn-kubernetes logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:51 +0000 UTC Normal Pod 
vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:52 +0000 UTC Normal Pod 00-find-service-xjs9x Binding Scheduled Successfully assigned kuttl-test-electric-grackle/00-find-service-xjs9x to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:52 +0000 UTC Normal Pod 00-find-service-xjs9x AddedInterface Add eth0 [10.129.2.72/23] from ovn-kubernetes logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:52 +0000 UTC Normal Pod 00-find-service-xjs9x.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:52 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-xjs9x job-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:53 +0000 UTC Normal Pod 00-find-service-xjs9x.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:53 +0000 UTC Normal Pod 00-find-service-xjs9x.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:57 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-v5xng.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.92:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:57 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-v5xng.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.92:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:59 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-v5xng.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:59 +0000 UTC Warning Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.88:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:27:59 +0000 UTC Warning Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.88:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 
07:28:32 | sidecar-deployment | 2023-09-25 07:28:00 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-v5xng.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.92:8080/": read tcp 10.131.0.2:38152->10.131.0.92:8080: read: connection reset by peer kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:00 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-v5xng.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.92:8080/": dial tcp 10.131.0.92:8080: connect: connection refused kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:01 +0000 UTC Normal Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:01 +0000 UTC Warning Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.88:8080/": read tcp 10.128.2.2:53362->10.128.2.88:8080: read: connection reset by peer kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:01 +0000 UTC Warning Pod vertx-create-span-sidecar-6d75ff8d46-h9fxl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.88:8080/": dial tcp 10.128.2.88:8080: connect: connection refused kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:03 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:07 +0000 UTC Normal Pod agent-as-sidecar2-65f8878cf7-hwvfd Binding Scheduled Successfully assigned kuttl-test-electric-grackle/agent-as-sidecar2-65f8878cf7-hwvfd to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:07 +0000 UTC Normal Pod agent-as-sidecar2-65f8878cf7-hwvfd AddedInterface Add eth0 [10.131.0.93/23] from ovn-kubernetes logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:07 +0000 UTC Normal Pod agent-as-sidecar2-65f8878cf7-hwvfd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:07 +0000 UTC Normal Pod agent-as-sidecar2-65f8878cf7-hwvfd.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:07 +0000 UTC Normal Pod agent-as-sidecar2-65f8878cf7-hwvfd.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:07 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-65f8878cf7 SuccessfulCreate Created pod: agent-as-sidecar2-65f8878cf7-hwvfd replicaset-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:07 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-65f8878cf7 to 1 deployment-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:09 +0000 UTC Normal Pod agent-as-sidecar-5dd755c95c-528dv.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:10 +0000 UTC Normal Pod 01-find-service-mt8g9 Binding Scheduled 
Successfully assigned kuttl-test-electric-grackle/01-find-service-mt8g9 to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:10 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-mt8g9 job-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:11 +0000 UTC Normal Pod 01-find-service-mt8g9 AddedInterface Add eth0 [10.129.2.73/23] from ovn-kubernetes logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:11 +0000 UTC Normal Pod 01-find-service-mt8g9.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-kw6980sb/pipeline@sha256:c436c3b58e8d6b63ab0050001b31694ee98ef1adfeaacf8fa9039c8dd02c20d9" already present on machine kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:11 +0000 UTC Normal Pod 01-find-service-mt8g9.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:11 +0000 UTC Normal Pod 01-find-service-mt8g9.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:11 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-v5xng.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.92:8080/": read tcp 10.131.0.2:52924->10.131.0.92:8080: read: connection reset by peer kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-568b7c9f6f-v5xng replicaset-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:13 +0000 UTC Normal Pod vertx-create-span-sidecar-7f568ff488-45jlb Binding Scheduled Successfully assigned kuttl-test-electric-grackle/vertx-create-span-sidecar-7f568ff488-45jlb to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7f568ff488 SuccessfulCreate Created pod: vertx-create-span-sidecar-7f568ff488-45jlb replicaset-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-568b7c9f6f to 0 from 1 deployment-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7f568ff488 to 1 from 0 deployment-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7f568ff488-45jlb AddedInterface Add eth0 [10.128.2.89/23] from ovn-kubernetes logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:14 +0000 UTC Normal Pod 
vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:23 +0000 UTC Warning Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.89:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:23 +0000 UTC Warning Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.89:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:25 +0000 UTC Normal Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:25 +0000 UTC Warning Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.89:8080/": read tcp 10.128.2.2:46306->10.128.2.89:8080: read: connection reset by peer kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:25 +0000 UTC Warning Pod vertx-create-span-sidecar-7f568ff488-45jlb.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.89:8080/": dial tcp 10.128.2.89:8080: connect: connection refused kubelet logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:30 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:30 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6d75ff8d46 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-6d75ff8d46-h9fxl replicaset-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:30 +0000 UTC Normal Pod vertx-create-span-sidecar-7b964b6c97-gvm2b Binding Scheduled Successfully assigned kuttl-test-electric-grackle/vertx-create-span-sidecar-7b964b6c97-gvm2b to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:30 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7b964b6c97 SuccessfulCreate Created pod: vertx-create-span-sidecar-7b964b6c97-gvm2b replicaset-controller logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:30 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-6d75ff8d46 to 0 from 1 
deployment-controller
logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:30 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7b964b6c97 to 1 from 0 deployment-controller
logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:31 +0000 UTC Normal Pod vertx-create-span-sidecar-7b964b6c97-gvm2b AddedInterface Add eth0 [10.131.0.94/23] from ovn-kubernetes
logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:31 +0000 UTC Normal Pod vertx-create-span-sidecar-7b964b6c97-gvm2b.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:31 +0000 UTC Normal Pod vertx-create-span-sidecar-7b964b6c97-gvm2b.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 07:28:32 | sidecar-deployment | 2023-09-25 07:28:31 +0000 UTC Normal Pod vertx-create-span-sidecar-7b964b6c97-gvm2b.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 07:28:32 | sidecar-deployment | Deleting namespace: kuttl-test-electric-grackle
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (140.52s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.75s)
--- PASS: kuttl/harness/sidecar-namespace (62.27s)
--- PASS: kuttl/harness/sidecar-skip-webhook (16.44s)
--- PASS: kuttl/harness/sidecar-deployment (56.03s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml
time="2023-09-25T07:28:39Z" level=debug msg="Setting a new name for the test suites"
time="2023-09-25T07:28:39Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-09-25T07:28:39Z" level=debug msg="normalizing test case names"
time="2023-09-25T07:28:39Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts"
time="2023-09-25T07:28:39Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace"
time="2023-09-25T07:28:39Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook"
time="2023-09-25T07:28:39Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment"
+------------------------------+--------+
|             NAME             | RESULT |
+------------------------------+--------+
| sidecar_artifacts            | passed |
| sidecar_sidecar_namespace    | passed |
| sidecar_sidecar_skip_webhook | passed |
| sidecar_sidecar_deployment   | passed |
+------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ '[' 0 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true
./hack/run-e2e-test-suite.sh streaming false true + '[' 3 -ne 3 ']' + test_suite_name=streaming + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/streaming.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-streaming make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ SKIP_ES_EXTERNAL=true \ ./tests/e2e/streaming/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 47m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 47m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/streaming/render.sh ++ export SUITE_DIR=./tests/e2e/streaming ++ SUITE_DIR=./tests/e2e/streaming ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
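The version_le pipeline traced above is what decides KAFKA_USE_CUSTOM_PODSET: it lets GNU sort -V pick the smaller of two version strings and succeeds when the first argument is the smaller one. Reconstructed from the traced commands (only the else branch is visible in this log; the true-branch value is an assumption):

  # Returns success iff $1 <= $2 in version order.
  version_le() {
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
  }

  # As exercised above: 0.32.0 is not <= 0.25.0, so this run ends up with
  # KAFKA_USE_CUSTOM_PODSET=true for the newer Strimzi releases.
  if version_le "$KAFKA_VERSION" 0.25.0; then
      KAFKA_USE_CUSTOM_PODSET=false   # assumed value for old Strimzi
  else
      KAFKA_USE_CUSTOM_PODSET=true
  fi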
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml + render_smoke_test simple-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=simple-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + export JAEGER_NAME=simple-streaming + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-tls + '[' 1 -ne 1 ']' + test_name=streaming-with-tls + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-tls' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m' Rendering files for test streaming-with-tls + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple + '[' streaming-simple '!=' _build ']' + cd .. 
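The streaming-simple render above shows the smoke-test contract: on OpenShift (is_secured=true) the query endpoint is the https route on :443, while spans are still reported to the in-cluster headless collector service on :14268. A condensed sketch of what the trace implies render_smoke_test does (only the secured branch taken in this run is shown):

    jaeger=simple-streaming; is_secured=true; test_step=05
    if [ "$is_secured" = true ]; then
      protocol=https://; query_port=:443
      template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
    fi
    export JAEGER_QUERY_ENDPOINT="${protocol}${jaeger}-query${query_port}"
    export JAEGER_COLLECTOR_ENDPOINT="http://${jaeger}-collector-headless:14268"
    export JAEGER_NAME="$jaeger"
    /tmp/jaeger-tests/bin/gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o "./${test_step}-assert.yaml"
    unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT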
+ mkdir -p streaming-with-tls + cd streaming-with-tls + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + render_smoke_test tls-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=tls-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + export JAEGER_NAME=tls-streaming + JAEGER_NAME=tls-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. 
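One cosmetic quirk visible in both renders above: render_assert_kafka computes follow-on step numbers with expr 00 + 1, which drops the zero padding, so the assert files come out as 00-assert.yaml, 1-assert.yaml, 02-assert.yaml. kuttl keys steps on the leading integer, so ordering is unaffected; a hypothetical padding fix would be:

    # Keep two-digit step names; 10# forces base 10 so "08"/"09" don't parse as octal.
    next_step=$(printf "%02d" $((10#$test_step + 1)))   # "00" -> "01", not "1"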
+ mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./4-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + render_install_tracegen auto-provisioned 06 + '[' 2 -ne 2 ']' + jaeger=auto-provisioned + step=06 + replicas=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/tracegen.yaml -o ./06-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=1 ./06-install.yaml + sed -i s~simple-prod~auto-provisioned~gi ./06-install.yaml + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-tracegen.yaml.template -o ./06-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd 
tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-2270117936 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-simple === PAUSE kuttl/harness/streaming-simple === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === RUN kuttl/harness/streaming-with-tls === PAUSE kuttl/harness/streaming-with-tls === CONT kuttl/harness/artifacts logger.go:42: 07:28:52 | artifacts | Creating namespace: kuttl-test-true-dolphin logger.go:42: 07:28:52 | artifacts | artifacts events from ns kuttl-test-true-dolphin: logger.go:42: 07:28:52 | artifacts | Deleting namespace: kuttl-test-true-dolphin === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-factual-adder logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 07:28:58 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 07:29:04 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 07:29:04 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 07:29:04 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 07:29:07 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 07:29:08 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 07:29:25 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 1-install 
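The 1-install step above is the OpenShift-specific Elasticsearch bootstrap: a dedicated service account is granted the privileged SCC before the StatefulSet (elasticsearch_0.yml) and Service (elasticsearch_1.yml), split out of tests/elasticsearch.yml by yq during rendering, are applied. Condensed from the commands in the log ($NAMESPACE is injected by kuttl):

    oc create sa deploy-elasticsearch -n "$NAMESPACE" 2>&1 | grep -v "already exists" || true
    oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n "$NAMESPACE"
    sleep 6   # give the SCC binding a moment to propagate
    kubectl apply -f elasticsearch_0.yml -n "$NAMESPACE"   # StatefulSet
    sleep 3
    kubectl apply -f elasticsearch_1.yml -n "$NAMESPACE"   # Service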
logger.go:42: 07:29:25 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install logger.go:42: 07:29:25 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-factual-adder/auto-provisioned created logger.go:42: 07:29:25 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install logger.go:42: 07:29:25 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3- logger.go:42: 07:29:59 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3- logger.go:42: 07:29:59 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4- logger.go:42: 07:30:32 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4- logger.go:42: 07:30:32 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5- logger.go:42: 07:30:54 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5- logger.go:42: 07:30:54 | streaming-with-autoprovisioning-autoscale/6-install | starting test step 6-install logger.go:42: 07:30:54 | streaming-with-autoprovisioning-autoscale/6-install | Deployment:kuttl-test-factual-adder/tracegen created logger.go:42: 07:30:59 | streaming-with-autoprovisioning-autoscale/6-install | test step completed 6-install logger.go:42: 07:30:59 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7- logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale/7- | test step completed 7- logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-factual-adder: logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:04 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-factual-adder/elasticsearch-0 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:04 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:05 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.128.2.90/23] from ovn-kubernetes logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:05 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:13 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 8.421797421s (8.421805691s including waiting) kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:13 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:13 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:20 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.128.2.90:9200/": dial tcp 10.128.2.90:9200: connect: connection refused kubelet 
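The lone Unhealthy event above (connection refused on 10.128.2.90:9200) is just Elasticsearch still starting; the step only completes once the readiness probe passes. A diagnostic sketch for confirming readiness by hand (not part of the suite; the curl must run somewhere with pod-network access):

    kubectl -n "$NAMESPACE" wait pod/elasticsearch-0 --for=condition=Ready --timeout=120s
    ES_IP=$(kubectl -n "$NAMESPACE" get pod elasticsearch-0 -o jsonpath='{.status.podIP}')
    curl -s "http://${ES_IP}:9200/_cluster/health"   # expect status green or yellow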
logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:30 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:30 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:30 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-factual-adder/data-auto-provisioned-zookeeper-0" logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:30 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:33 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-8781f0b7-89c0-41c1-b61a-b6e48c6715b8 logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:34 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-factual-adder/auto-provisioned-zookeeper-0 to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:36 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8781f0b7-89c0-41c1-b61a-b6e48c6715b8" attachdetach-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:39 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.131.0.95/23] from ovn-kubernetes logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:39 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:39 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:29:39 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:00 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:00 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:01 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is 
provisioning volume for claim "kuttl-test-factual-adder/data-0-auto-provisioned-kafka-0" logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:01 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:04 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-da1874f2-9cc7-43f3-92fb-ac0c7cece1df logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:05 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-factual-adder/auto-provisioned-kafka-0 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:07 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-da1874f2-9cc7-43f3-92fb-ac0c7cece1df" attachdetach-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:11 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.128.2.91/23] from ovn-kubernetes logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:11 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:11 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:11 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk Binding Scheduled Successfully assigned kuttl-test-factual-adder/auto-provisioned-entity-operator-bf8bbdd84-kb7tk to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk AddedInterface Add eth0 [10.129.2.75/23] from ovn-kubernetes logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk.spec.containers{topic-operator} Started Started container topic-operator kubelet 
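A note on the autoscale outcome reported in the events further down: the step-2 render rewrote the ingester's memory request twice, ending at "500m" (half a byte in Kubernetes resource units), so any real usage is astronomically above the target and the ingester HPA rescales to its maxReplicas of 2. The collector HPA, whose jaeger-collector container declares no memory request at all, keeps failing with FailedGetResourceMetric; the earlier no-metrics warnings are normal while metrics-server has no samples yet. A diagnostic sketch:

    # Inspect the autoscalers named in the events below.
    kubectl -n "$NAMESPACE" get hpa auto-provisioned-collector auto-provisioned-ingester
    kubectl -n "$NAMESPACE" describe hpa auto-provisioned-ingester   # current vs. target utilization
    # "percentage of request" metrics require every container in the
    # target pods to declare a request for that resource.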
logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-bf8bbdd84 SuccessfulCreate Created pod: auto-provisioned-entity-operator-bf8bbdd84-kb7tk replicaset-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:33 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-bf8bbdd84 to 1 deployment-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:34 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:34 +0000 UTC Normal Pod auto-provisioned-entity-operator-bf8bbdd84-kb7tk.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:54 +0000 UTC Normal Pod tracegen-7ddfd7f5cc-crgzz Binding Scheduled Successfully assigned kuttl-test-factual-adder/tracegen-7ddfd7f5cc-crgzz to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:54 +0000 UTC Warning Pod tracegen-7ddfd7f5cc-crgzz FailedMount MountVolume.SetUp failed for volume "auto-provisioned-trusted-ca" : configmap "auto-provisioned-trusted-ca" not found kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:54 +0000 UTC Warning Pod tracegen-7ddfd7f5cc-crgzz FailedMount MountVolume.SetUp failed for volume "auto-provisioned-service-ca" : configmap "auto-provisioned-service-ca" not found kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:54 +0000 UTC Normal ReplicaSet.apps tracegen-7ddfd7f5cc SuccessfulCreate Created pod: tracegen-7ddfd7f5cc-crgzz replicaset-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:54 +0000 UTC Normal Deployment.apps tracegen ScalingReplicaSet Scaled up replica set tracegen-7ddfd7f5cc to 1 deployment-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 
2023-09-25 07:30:56 +0000 UTC Normal Pod auto-provisioned-collector-5977dc44ff-4q54w Binding Scheduled Successfully assigned kuttl-test-factual-adder/auto-provisioned-collector-5977dc44ff-4q54w to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Warning Pod auto-provisioned-collector-5977dc44ff-4q54w FailedMount MountVolume.SetUp failed for volume "auto-provisioned-collector-tls-config-volume" : secret "auto-provisioned-collector-headless-tls" not found kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-5977dc44ff SuccessfulCreate Created pod: auto-provisioned-collector-5977dc44ff-4q54w replicaset-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-5977dc44ff to 1 deployment-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-m87th Binding Scheduled Successfully assigned kuttl-test-factual-adder/auto-provisioned-ingester-f577f746f-m87th to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-m87th AddedInterface Add eth0 [10.131.0.97/23] from ovn-kubernetes logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-m87th.spec.containers{jaeger-ingester} Pulled Container image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-m87th.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-m87th.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-f577f746f SuccessfulCreate Created pod: auto-provisioned-ingester-f577f746f-m87th replicaset-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-f577f746f to 1 deployment-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr Binding Scheduled Successfully assigned kuttl-test-factual-adder/auto-provisioned-query-c5878cc4-6brfr to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-c5878cc4 SuccessfulCreate Created pod: auto-provisioned-query-c5878cc4-6brfr replicaset-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 
07:30:56 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-c5878cc4 to 1 deployment-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Pod tracegen-7ddfd7f5cc-crgzz AddedInterface Add eth0 [10.131.0.96/23] from ovn-kubernetes logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:56 +0000 UTC Normal Pod tracegen-7ddfd7f5cc-crgzz.spec.containers{tracegen} Pulling Pulling image "jaegertracing/jaeger-tracegen:1.49.0" kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-collector-5977dc44ff-4q54w AddedInterface Add eth0 [10.128.2.92/23] from ovn-kubernetes logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-collector-5977dc44ff-4q54w.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-collector-5977dc44ff-4q54w.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-collector-5977dc44ff-4q54w.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr AddedInterface Add eth0 [10.128.2.93/23] from ovn-kubernetes logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 
07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod auto-provisioned-query-c5878cc4-6brfr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod tracegen-7ddfd7f5cc-crgzz.spec.containers{tracegen} Pulled Successfully pulled image "jaegertracing/jaeger-tracegen:1.49.0" in 1.0135463s (1.01355328s including waiting) kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod tracegen-7ddfd7f5cc-crgzz.spec.containers{tracegen} Created Created container tracegen kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod tracegen-7ddfd7f5cc-crgzz.spec.containers{tracegen} Started Started container tracegen kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod tracegen-7ddfd7f5cc-crgzz.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod tracegen-7ddfd7f5cc-crgzz.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:30:57 +0000 UTC Normal Pod tracegen-7ddfd7f5cc-crgzz.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics 
for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod auto-provisioned-collector-5977dc44ff-4q54w horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-plqwl Binding Scheduled Successfully assigned kuttl-test-factual-adder/auto-provisioned-ingester-f577f746f-plqwl to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-plqwl AddedInterface Add eth0 [10.129.2.76/23] from ovn-kubernetes logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-plqwl.spec.containers{jaeger-ingester} Pulled Container image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" already present on machine kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-f577f746f SuccessfulCreate Created pod: auto-provisioned-ingester-f577f746f-plqwl replicaset-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Normal 
HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester SuccessfulRescale New size: 2; reason: memory resource utilization (percentage of request) above target horizontal-pod-autoscaler logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:56 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-f577f746f to 2 from 1 deployment-controller logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:57 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-plqwl.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | 2023-09-25 07:31:57 +0000 UTC Normal Pod auto-provisioned-ingester-f577f746f-plqwl.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 07:31:59 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-factual-adder === CONT kuttl/harness/streaming-with-tls logger.go:42: 07:32:11 | streaming-with-tls | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:32:11 | streaming-with-tls | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:32:11 | streaming-with-tls | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:32:11 | streaming-with-tls | Creating namespace: kuttl-test-useful-mosquito logger.go:42: 07:32:11 | streaming-with-tls/0-install | starting test step 0-install logger.go:42: 07:32:11 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 07:32:11 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:32:11 | streaming-with-tls/0-install | >>>> Skipping kafka-operator undeploy logger.go:42: 07:32:11 | streaming-with-tls/0-install | kubectl delete --namespace kuttl-test-useful-mosquito -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 07:32:11 | streaming-with-tls/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist logger.go:42: 07:32:11 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:32:11 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 07:32:11 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:32:11 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-useful-mosquito logger.go:42: 07:32:11 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-useful-mosquito 2>&1 | grep -v "already exists" || true logger.go:42: 07:32:11 | streaming-with-tls/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 07:32:11 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-useful-mosquito logger.go:42: 07:32:11 | streaming-with-tls/0-install | mkdir -p tests/_build/ logger.go:42: 07:32:11 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-useful-mosquito 2>&1 | grep -v "already exists" || true logger.go:42: 07:32:11 | streaming-with-tls/0-install | curl --fail --location
"https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 07:32:11 | streaming-with-tls/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 07:32:11 | streaming-with-tls/0-install | Dload Upload Total Spent Left Speed logger.go:42: 07:32:11 | streaming-with-tls/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 6507 0 --:--:-- --:--:-- --:--:-- 6553 logger.go:42: 07:32:11 | streaming-with-tls/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 07:32:11 | streaming-with-tls/0-install | kubectl -n kuttl-test-useful-mosquito apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 07:32:11 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 07:32:11 | streaming-with-tls/0-install | kubectl -n kuttl-test-useful-mosquito apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 07:32:12 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 07:32:12 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:39:13 | streaming-with-tls/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 07:39:13 | streaming-with-tls | streaming-with-tls events from ns kuttl-test-useful-mosquito: logger.go:42: 07:39:13 | streaming-with-tls | Deleting namespace: kuttl-test-useful-mosquito === CONT kuttl/harness/streaming-simple logger.go:42: 07:39:18 | streaming-simple | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:39:18 | streaming-simple | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:39:18 | streaming-simple | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:39:18 | streaming-simple | Creating namespace: kuttl-test-stable-swift logger.go:42: 07:39:18 | streaming-simple/0-install | starting test step 0-install logger.go:42: 07:39:18 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 07:39:18 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:39:18 | streaming-simple/0-install | >>>> Skipping kafka-operator undeploy logger.go:42: 07:39:18 | streaming-simple/0-install | kubectl delete --namespace kuttl-test-stable-swift -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 07:39:19 | streaming-simple/0-install | Error from server (NotFound): error when deleting "tests/_build/kafka-example.yaml": kafkas.kafka.strimzi.io "my-cluster" not found logger.go:42: 07:39:19 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:39:19 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 07:39:19 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:39:19 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-stable-swift logger.go:42: 07:39:19 |
streaming-simple/0-install | kubectl create namespace kuttl-test-stable-swift 2>&1 | grep -v "already exists" || true logger.go:42: 07:39:19 | streaming-simple/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 07:39:19 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-stable-swift logger.go:42: 07:39:19 | streaming-simple/0-install | mkdir -p tests/_build/ logger.go:42: 07:39:19 | streaming-simple/0-install | kubectl create namespace kuttl-test-stable-swift 2>&1 | grep -v "already exists" || true logger.go:42: 07:39:19 | streaming-simple/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 07:39:19 | streaming-simple/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 07:39:19 | streaming-simple/0-install | Dload Upload Total Spent Left Speed logger.go:42: 07:39:19 | streaming-simple/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 7137 0 --:--:-- --:--:-- --:--:-- 7148 logger.go:42: 07:39:19 | streaming-simple/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 07:39:19 | streaming-simple/0-install | kubectl -n kuttl-test-stable-swift apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 07:39:19 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 07:39:19 | streaming-simple/0-install | kubectl -n kuttl-test-stable-swift apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 07:39:19 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 07:39:19 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:46:20 | streaming-simple/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 07:46:20 | streaming-simple | streaming-simple events from ns kuttl-test-stable-swift: logger.go:42: 07:46:20 | streaming-simple | Deleting namespace: kuttl-test-stable-swift === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- FAIL: kuttl (1054.14s) --- FAIL: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.74s) --- PASS: kuttl/harness/streaming-with-autoprovisioning-autoscale (192.87s) --- FAIL: kuttl/harness/streaming-with-tls (427.77s) --- FAIL: kuttl/harness/streaming-simple (427.71s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml time="2023-09-25T07:46:27Z" level=debug msg="Setting a new name for the test suites" time="2023-09-25T07:46:27Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-09-25T07:46:27Z" level=debug msg="normalizing test case names" time="2023-09-25T07:46:27Z" level=debug msg="streaming/artifacts -> streaming_artifacts" time="2023-09-25T07:46:27Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale" time="2023-09-25T07:46:27Z" level=debug msg="streaming/streaming-with-tls -> streaming_streaming_with_tls" 
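junitcli above normalizes each kuttl case name to <suite>_<name> (slashes and dashes become underscores), drops the synthetic artifacts case, and writes the per-suite XML under /logs/artifacts. The artifact check that follows (the same loop already ran after the sidecar suite) only fails the job when more than three suite reports contain failures; reconstructed from the xtrace:

    count=0
    for file in "$ARTIFACT_DIR"/*; do
      # grep -c prints how many lines of the report contain 'failure message'
      if [ "$(grep -c 'failure message' "$file")" -gt 0 ]; then
        count=$((count + 1))
      fi
    done
    [ "$count" -gt 3 ] && exit 1   # streaming is the only red suite here, so count=1
    exit 0

That tolerance is why this run still exits 0 below even though streaming_streaming_with_tls and streaming_streaming_simple failed waiting on the missing my-cluster-zookeeper StrimziPodSet.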
time="2023-09-25T07:46:27Z" level=debug msg="streaming/streaming-simple -> streaming_streaming_simple" +-----------------------------------------------------+--------+ | NAME | RESULT | +-----------------------------------------------------+--------+ | streaming_artifacts | passed | | streaming_streaming_with_autoprovisioning_autoscale | passed | | streaming_streaming_with_tls | failed | | streaming_streaming_simple | failed | +-----------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/sidecar.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/streaming.xml + '[' 2 -gt 0 ']' + count=1 + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true + '[' 3 -ne 3 ']' + test_suite_name=ui + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/ui.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-ui make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 65m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-24-044110 True False 65m Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/ui/render.sh ++ export SUITE_DIR=./tests/e2e/ui ++ SUITE_DIR=./tests/e2e/ui ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/ui ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test allinone + '[' 1 -ne 1 ']' + test_name=allinone + echo =========================================================================== =========================================================================== + info 'Rendering files for test allinone' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test allinone\e[0m' Rendering files for test allinone + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build + '[' _build '!=' _build ']' + mkdir -p allinone + cd allinone + export GET_URL_COMMAND + export URL + export JAEGER_NAME=all-in-one-ui + JAEGER_NAME=all-in-one-ui + '[' true = true ']' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml + start_test production + '[' 1 -ne 1 ']' + test_name=production + echo =========================================================================== =========================================================================== + info 'Rendering files for test production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test production\e[0m' Rendering files for test production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone + '[' allinone '!=' _build ']' + cd .. 
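Everything rendered above is plain environment-variable templating: for each test step the render script exports the knobs it needs (JAEGER_NAME, EXPECTED_CODE, the route-derived URL, ASSERT_PRESENT, TRACKING_ID, ...) and runs gomplate over a shared template to emit the numbered kuttl step file. A minimal sketch of that pattern, using a hypothetical template named step.yaml.template and assuming the template reads variables through gomplate's .Env lookup:

    # step.yaml.template (hypothetical; the real templates live under
    # tests/templates/ in the repo):
    #   apiVersion: kuttl.dev/v1beta1
    #   kind: TestStep
    #   commands:
    #     - script: "$ASSERT_HTTP_CODE_PROGRAM $URL {{ .Env.EXPECTED_CODE }} true $NAMESPACE {{ .Env.JAEGER_NAME }}"
    # Export the variables the template references, then render it into a
    # numbered kuttl step file:
    export JAEGER_NAME=all-in-one-ui
    export EXPECTED_CODE=200
    gomplate -f step.yaml.template -o ./01-curl.yaml

Because the values are baked in at render time, the same template yields the 200-expected step (01-curl.yaml), the 403-expected step (02-check-forbbiden-access.yaml), and so on.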
+ mkdir -p production + cd production + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + [[ true = true ]] + [[ true = true ]] + render_install_jaeger production-ui production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + '[' true = true ']' + INSECURE=true + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml + INSECURE=true + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml + ASSERT_PRESENT=false + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running ui E2E tests' Running ui E2E tests + cd tests/e2e/ui/_build + set +e + KUBECONFIG=/tmp/kubeconfig-2270117936 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 3 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/allinone
=== PAUSE kuttl/harness/allinone
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/production
=== PAUSE kuttl/harness/production
=== CONT kuttl/harness/allinone
logger.go:42: 07:46:35 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:46:35 | allinone | Creating namespace: kuttl-test-workable-goblin
logger.go:42: 07:46:35 | allinone/0-install | starting test step 0-install
logger.go:42: 07:46:35 | allinone/0-install | Jaeger:kuttl-test-workable-goblin/all-in-one-ui created
logger.go:42: 07:46:39 | allinone/0-install | test step completed 0-install
logger.go:42: 07:46:39 | allinone/1-curl | starting test step 1-curl
logger.go:42: 07:46:39 | allinone/1-curl | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:46:39 | allinone/1-curl | Checking the Ingress host value was populated
logger.go:42: 07:46:39 | allinone/1-curl | Try number 0
logger.go:42: 07:46:39 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-workable-goblin.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 07:46:39 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui]
logger.go:42: 07:46:39 | allinone/1-curl | Checking an expected HTTP response
logger.go:42: 07:46:39 | allinone/1-curl | Running in OpenShift
logger.go:42: 07:46:39 | allinone/1-curl | User not provided. Getting the token...
logger.go:42: 07:46:41 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:46:49 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-workable-goblin.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:46:49 | allinone/1-curl | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:46:49 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-workable-goblin.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:47:09 | allinone/1-curl | HTTP response is 503. 200 expected.
Waiting 10 s
logger.go:42: 07:47:19 | allinone/1-curl | Try number 3/30 the https://all-in-one-ui-kuttl-test-workable-goblin.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:47:19 | allinone/1-curl | curl response asserted properly
logger.go:42: 07:47:19 | allinone/1-curl | test step completed 1-curl
logger.go:42: 07:47:19 | allinone/2-delete | starting test step 2-delete
logger.go:42: 07:47:19 | allinone/2-delete | Jaeger:kuttl-test-workable-goblin/all-in-one-ui created
logger.go:42: 07:47:19 | allinone/2-delete | test step completed 2-delete
logger.go:42: 07:47:19 | allinone/3-install | starting test step 3-install
logger.go:42: 07:47:19 | allinone/3-install | Jaeger:kuttl-test-workable-goblin/all-in-one-ui updated
logger.go:42: 07:47:19 | allinone/3-install | test step completed 3-install
logger.go:42: 07:47:19 | allinone/4-test-ui-config | starting test step 4-test-ui-config
logger.go:42: 07:47:19 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:47:19 | allinone/4-test-ui-config | Checking the Ingress host value was populated
logger.go:42: 07:47:19 | allinone/4-test-ui-config | Try number 0
logger.go:42: 07:47:19 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template:
logger.go:42: 07:47:19 | allinone/4-test-ui-config | template was:
logger.go:42: 07:47:19 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host}
logger.go:42: 07:47:19 | allinone/4-test-ui-config | object given to jsonpath engine was:
logger.go:42: 07:47:19 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}}
logger.go:42: 07:47:19 | allinone/4-test-ui-config |
logger.go:42: 07:47:19 | allinone/4-test-ui-config |
logger.go:42: 07:47:29 | allinone/4-test-ui-config | Try number 1
logger.go:42: 07:47:29 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-workable-goblin.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 07:47:29 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 07:47:29 | allinone/4-test-ui-config | time="2023-09-25T07:47:29Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-workable-goblin.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search..."
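The sequence above, a failed jsonpath lookup on try 0, then a hostname, then 503s giving way to the expected status code, is the two retry helpers doing their job: the Route object exists before its .status.ingress is populated, and the OpenShift router answers 503 while the backing pod is still starting. A condensed sketch of that pattern (a restatement, not the repo's actual ensure-ingress-host.sh / assert-jaeger-http-code.sh):

    # Step 1: wait for the Route to report a hostname; the "Try number 0"
    # jsonpath error above is this loop's first pass.
    for try in $(seq 0 30); do
      HOST=$(kubectl get routes -n "$NAMESPACE" \
        -o=jsonpath='{.items[0].status.ingress[0].host}') && [ -n "$HOST" ] && break
      echo "Try number $try"; sleep 10
    done
    # Step 2: poll the route until the expected HTTP code arrives.
    for try in $(seq 1 30); do
      code=$(curl -sk -o /dev/null -w '%{http_code}' "https://$HOST/search")
      [ "$code" = "$EXPECTED_CODE" ] && echo "curl response asserted properly" && exit 0
      echo "HTTP response is $code. $EXPECTED_CODE expected. Waiting 10 s"; sleep 10
    done
    exit 1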
logger.go:42: 07:47:29 | allinone/4-test-ui-config | time="2023-09-25T07:47:29Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 07:47:29 | allinone/4-test-ui-config | time="2023-09-25T07:47:29Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-workable-goblin.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search" logger.go:42: 07:47:29 | allinone/4-test-ui-config | time="2023-09-25T07:47:29Z" level=info msg="Doing request number 0" logger.go:42: 07:47:29 | allinone/4-test-ui-config | time="2023-09-25T07:47:29Z" level=warning msg="Status code: 503" logger.go:42: 07:47:29 | allinone/4-test-ui-config | time="2023-09-25T07:47:29Z" level=info msg="Doing request number 1" logger.go:42: 07:47:29 | allinone/4-test-ui-config | time="2023-09-25T07:47:29Z" level=warning msg="Status code: 503" logger.go:42: 07:47:37 | allinone/4-test-ui-config | time="2023-09-25T07:47:37Z" level=info msg="Doing request number 2" logger.go:42: 07:47:37 | allinone/4-test-ui-config | time="2023-09-25T07:47:37Z" level=info msg="Content found and asserted!" logger.go:42: 07:47:37 | allinone/4-test-ui-config | time="2023-09-25T07:47:37Z" level=info msg="Success!" logger.go:42: 07:47:37 | allinone/4-test-ui-config | test step completed 4-test-ui-config logger.go:42: 07:47:37 | allinone | allinone events from ns kuttl-test-workable-goblin: logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m Binding Scheduled Successfully assigned kuttl-test-workable-goblin/all-in-one-ui-6cd549b669-rtv2m to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m AddedInterface Add eth0 [10.131.0.98/23] from ovn-kubernetes logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-6cd549b669 SuccessfulCreate Created pod: all-in-one-ui-6cd549b669-rtv2m replicaset-controller logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:39 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-6cd549b669 to 1 deployment-controller logger.go:42: 
07:47:37 | allinone | 2023-09-25 07:46:46 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:46 +0000 UTC Normal Pod all-in-one-ui-6cd549b669-rtv2m.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:46 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-6cd549b669 SuccessfulDelete Deleted pod: all-in-one-ui-6cd549b669-rtv2m replicaset-controller logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:46 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-6cd549b669 to 0 from 1 deployment-controller logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv Binding Scheduled Successfully assigned kuttl-test-workable-goblin/all-in-one-ui-55686595dc-m7kcv to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv AddedInterface Add eth0 [10.131.0.99/23] from ovn-kubernetes logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-55686595dc SuccessfulCreate Created pod: all-in-one-ui-55686595dc-m7kcv replicaset-controller logger.go:42: 07:47:37 | allinone | 2023-09-25 07:46:47 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-55686595dc to 1 deployment-controller logger.go:42: 07:47:37 | allinone | 2023-09-25 07:47:19 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:47:19 +0000 UTC Normal Pod all-in-one-ui-55686595dc-m7kcv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:47:25 +0000 UTC Normal Pod all-in-one-ui-5574f845db-szh66 Binding Scheduled Successfully assigned kuttl-test-workable-goblin/all-in-one-ui-5574f845db-szh66 to ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:47:37 | allinone | 
2023-09-25 07:47:25 +0000 UTC Normal Pod all-in-one-ui-5574f845db-szh66 AddedInterface Add eth0 [10.128.2.95/23] from ovn-kubernetes logger.go:42: 07:47:37 | allinone | 2023-09-25 07:47:25 +0000 UTC Normal Pod all-in-one-ui-5574f845db-szh66.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:47:25 +0000 UTC Normal Pod all-in-one-ui-5574f845db-szh66.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:47:25 +0000 UTC Normal Pod all-in-one-ui-5574f845db-szh66.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:47:37 | allinone | 2023-09-25 07:47:25 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-5574f845db SuccessfulCreate Created pod: all-in-one-ui-5574f845db-szh66 replicaset-controller logger.go:42: 07:47:37 | allinone | 2023-09-25 07:47:25 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-5574f845db to 1 deployment-controller logger.go:42: 07:47:37 | allinone | Deleting namespace: kuttl-test-workable-goblin === CONT kuttl/harness/production logger.go:42: 07:47:43 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:47:43 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:47:43 | production | Creating namespace: kuttl-test-gentle-ray logger.go:42: 07:47:43 | production/1-install | starting test step 1-install logger.go:42: 07:47:43 | production/1-install | Jaeger:kuttl-test-gentle-ray/production-ui created logger.go:42: 07:48:20 | production/1-install | test step completed 1-install logger.go:42: 07:48:20 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access logger.go:42: 07:48:20 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh] logger.go:42: 07:48:20 | production/2-check-forbbiden-access | Checking the Ingress host value was populated logger.go:42: 07:48:20 | production/2-check-forbbiden-access | Try number 0 logger.go:42: 07:48:20 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com logger.go:42: 07:48:20 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui] logger.go:42: 07:48:20 | production/2-check-forbbiden-access | Checking an expected HTTP response logger.go:42: 07:48:20 | production/2-check-forbbiden-access | Running in OpenShift logger.go:42: 07:48:20 | production/2-check-forbbiden-access | Not using any secret logger.go:42: 07:48:20 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:20 | production/2-check-forbbiden-access | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 07:48:20 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:20 | production/2-check-forbbiden-access | HTTP response is 503. 403 expected. Waiting 10 s logger.go:42: 07:48:30 | production/2-check-forbbiden-access | Try number 3/30 the https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:30 | production/2-check-forbbiden-access | curl response asserted properly logger.go:42: 07:48:30 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access logger.go:42: 07:48:30 | production/3-curl | starting test step 3-curl logger.go:42: 07:48:30 | production/3-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 07:48:30 | production/3-curl | Checking the Ingress host value was populated logger.go:42: 07:48:30 | production/3-curl | Try number 0 logger.go:42: 07:48:30 | production/3-curl | Hostname is production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com logger.go:42: 07:48:30 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 07:48:30 | production/3-curl | Checking an expected HTTP response logger.go:42: 07:48:30 | production/3-curl | Running in OpenShift logger.go:42: 07:48:30 | production/3-curl | User not provided. Getting the token... logger.go:42: 07:48:32 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:48:38 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:38 | production/3-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 07:48:38 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:38 | production/3-curl | HTTP response is 503. 200 expected. 
Waiting 10 s logger.go:42: 07:48:48 | production/3-curl | Try number 3/30 the https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:48 | production/3-curl | curl response asserted properly logger.go:42: 07:48:48 | production/3-curl | test step completed 3-curl logger.go:42: 07:48:48 | production/4-install | starting test step 4-install logger.go:42: 07:48:48 | production/4-install | Jaeger:kuttl-test-gentle-ray/production-ui updated logger.go:42: 07:48:48 | production/4-install | test step completed 4-install logger.go:42: 07:48:48 | production/5-check-disabled-security | starting test step 5-check-disabled-security logger.go:42: 07:48:48 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh] logger.go:42: 07:48:48 | production/5-check-disabled-security | Checking the Ingress host value was populated logger.go:42: 07:48:48 | production/5-check-disabled-security | Try number 0 logger.go:42: 07:48:48 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com logger.go:42: 07:48:48 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 07:48:48 | production/5-check-disabled-security | Checking an expected HTTP response logger.go:42: 07:48:48 | production/5-check-disabled-security | Running in OpenShift logger.go:42: 07:48:48 | production/5-check-disabled-security | Not using any secret logger.go:42: 07:48:48 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:48 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 07:48:48 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:48 | production/5-check-disabled-security | HTTP response is 403. 200 expected. 
Waiting 10 s logger.go:42: 07:48:58 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 07:48:58 | production/5-check-disabled-security | curl response asserted properly logger.go:42: 07:48:58 | production/5-check-disabled-security | test step completed 5-check-disabled-security logger.go:42: 07:48:58 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID logger.go:42: 07:48:58 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 07:48:58 | production/6-check-NO-gaID | Checking the Ingress host value was populated logger.go:42: 07:48:58 | production/6-check-NO-gaID | Try number 0 logger.go:42: 07:48:58 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com logger.go:42: 07:48:58 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 07:48:59 | production/6-check-NO-gaID | time="2023-09-25T07:48:59Z" level=info msg="Querying https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search..." logger.go:42: 07:48:59 | production/6-check-NO-gaID | time="2023-09-25T07:48:59Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 07:48:59 | production/6-check-NO-gaID | time="2023-09-25T07:48:59Z" level=info msg="Polling to https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search" logger.go:42: 07:48:59 | production/6-check-NO-gaID | time="2023-09-25T07:48:59Z" level=info msg="Doing request number 0" logger.go:42: 07:48:59 | production/6-check-NO-gaID | time="2023-09-25T07:48:59Z" level=info msg="Content not found and asserted it was not found!" logger.go:42: 07:48:59 | production/6-check-NO-gaID | time="2023-09-25T07:48:59Z" level=info msg="Success!" 
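The 6-check-NO-gaID step that just completed runs the uiconfig checker with ASSERT_PRESENT=false, i.e. it passes only if MyTrackingId is absent from the served UI page; 8-check-gaID below inverts the assertion. A rough shell equivalent of what the Go program at cmd-utils/uiconfig/main.go asserts (illustration only; the variable names match the ones exported in the log):

    # Fetch the UI page and assert presence/absence of the expected content.
    body=$(curl -sk "$QUERY_HOSTNAME")
    if [ "$ASSERT_PRESENT" = "true" ]; then
      echo "$body" | grep -q "$EXPECTED_CONTENT" || exit 1   # must contain the tracking ID
    else
      echo "$body" | grep -q "$EXPECTED_CONTENT" && exit 1   # must NOT contain it
    fi
    echo "Success!"

The real checker additionally polls with retries, which is why the log shows numbered "Doing request number N" attempts before it settles.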
logger.go:42: 07:48:59 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID logger.go:42: 07:48:59 | production/7-add-tracking-id | starting test step 7-add-tracking-id logger.go:42: 07:48:59 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE] logger.go:42: 07:48:59 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured logger.go:42: 07:48:59 | production/7-add-tracking-id | test step completed 7-add-tracking-id logger.go:42: 07:48:59 | production/8-check-gaID | starting test step 8-check-gaID logger.go:42: 07:48:59 | production/8-check-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 07:48:59 | production/8-check-gaID | Checking the Ingress host value was populated logger.go:42: 07:48:59 | production/8-check-gaID | Try number 0 logger.go:42: 07:48:59 | production/8-check-gaID | Hostname is production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com logger.go:42: 07:48:59 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 07:49:00 | production/8-check-gaID | time="2023-09-25T07:49:00Z" level=info msg="Querying https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search..." logger.go:42: 07:49:00 | production/8-check-gaID | time="2023-09-25T07:49:00Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 07:49:00 | production/8-check-gaID | time="2023-09-25T07:49:00Z" level=info msg="Polling to https://production-ui-kuttl-test-gentle-ray.apps.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com/search" logger.go:42: 07:49:00 | production/8-check-gaID | time="2023-09-25T07:49:00Z" level=info msg="Doing request number 0" logger.go:42: 07:49:00 | production/8-check-gaID | time="2023-09-25T07:49:00Z" level=warning msg="Found: false . Assert: true" logger.go:42: 07:49:00 | production/8-check-gaID | time="2023-09-25T07:49:00Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 07:49:00 | production/8-check-gaID | time="2023-09-25T07:49:00Z" level=info msg="Doing request number 1" logger.go:42: 07:49:00 | production/8-check-gaID | time="2023-09-25T07:49:00Z" level=warning msg="Found: false . Assert: true" logger.go:42: 07:49:00 | production/8-check-gaID | time="2023-09-25T07:49:00Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 07:49:08 | production/8-check-gaID | time="2023-09-25T07:49:08Z" level=info msg="Doing request number 2" logger.go:42: 07:49:08 | production/8-check-gaID | time="2023-09-25T07:49:08Z" level=info msg="Content found and asserted!" logger.go:42: 07:49:08 | production/8-check-gaID | time="2023-09-25T07:49:08Z" level=info msg="Success!" 
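The contents of add-tracking-id.yaml applied in step 7 are not shown in this log; given the gaID assertions on either side of it, it plausibly sets the UI tracking ID on the Jaeger CR, roughly like this (a sketch, field values hypothetical):

    kubectl apply -n "$NAMESPACE" -f - <<'EOF'
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: production-ui
    spec:
      ui:
        options:
          tracking:
            gaID: MyTrackingId
    EOF

The "Found: false" warnings right after step 8 starts line up with the query deployment being rolled to pick up the new UI configuration; see the ScalingReplicaSet events for production-ui-query at 07:49:00-07:49:01 in the event dump below.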
logger.go:42: 07:49:08 | production/8-check-gaID | test step completed 8-check-gaID logger.go:42: 07:49:08 | production | production events from ns kuttl-test-gentle-ray: logger.go:42: 07:49:08 | production | 2023-09-25 07:47:50 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb57 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj replicaset-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:47:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj Binding Scheduled Successfully assigned kuttl-test-gentle-ray/elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj to ip-10-0-49-86.ec2.internal default-scheduler logger.go:42: 07:49:08 | production | 2023-09-25 07:47:50 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestgentlerayproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb57 to 1 deployment-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:47:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj AddedInterface Add eth0 [10.129.2.77/23] from ovn-kubernetes logger.go:42: 07:49:08 | production | 2023-09-25 07:47:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:47:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:47:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:47:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:47:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:47:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:01 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:06 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestgentlerayproductionui-1-f6b4fb5882vj.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:17 +0000 UTC Normal Pod production-ui-collector-7696dc959d-7sqcf Binding Scheduled Successfully assigned kuttl-test-gentle-ray/production-ui-collector-7696dc959d-7sqcf to 
ip-10-0-56-78.ec2.internal default-scheduler logger.go:42: 07:49:08 | production | 2023-09-25 07:48:17 +0000 UTC Warning Pod production-ui-collector-7696dc959d-7sqcf FailedMount MountVolume.SetUp failed for volume "production-ui-collector-tls-config-volume" : secret "production-ui-collector-headless-tls" not found kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:17 +0000 UTC Normal ReplicaSet.apps production-ui-collector-7696dc959d SuccessfulCreate Created pod: production-ui-collector-7696dc959d-7sqcf replicaset-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:17 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-7696dc959d to 1 deployment-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:17 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r Binding Scheduled Successfully assigned kuttl-test-gentle-ray/production-ui-query-8544fbffd8-kj97r to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:49:08 | production | 2023-09-25 07:48:17 +0000 UTC Warning Pod production-ui-query-8544fbffd8-kj97r FailedMount MountVolume.SetUp failed for volume "production-ui-ui-oauth-proxy-tls" : secret "production-ui-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:17 +0000 UTC Normal ReplicaSet.apps production-ui-query-8544fbffd8 SuccessfulCreate Created pod: production-ui-query-8544fbffd8-kj97r replicaset-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:17 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-8544fbffd8 to 1 deployment-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-collector-7696dc959d-7sqcf AddedInterface Add eth0 [10.128.2.96/23] from ovn-kubernetes logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-collector-7696dc959d-7sqcf.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-collector-7696dc959d-7sqcf.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-collector-7696dc959d-7sqcf.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r AddedInterface Add eth0 [10.131.0.100/23] from ovn-kubernetes logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:49:08 | production 
| 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:18 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:49:08 | production | 2023-09-25 07:48:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:49:08 | production | 2023-09-25 07:48:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:49:08 | production | 2023-09-25 07:48:33 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:33 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:33 +0000 UTC Normal Pod production-ui-query-8544fbffd8-kj97r.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:33 +0000 UTC Normal ReplicaSet.apps production-ui-query-8544fbffd8 SuccessfulDelete Deleted pod: production-ui-query-8544fbffd8-kj97r replicaset-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:33 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-8544fbffd8 to 0 from 1 deployment-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs Binding Scheduled Successfully 
assigned kuttl-test-gentle-ray/production-ui-query-775d68b5b6-5l2xs to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs AddedInterface Add eth0 [10.131.0.101/23] from ovn-kubernetes logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5401cc799dcdb71236d40fa42089ebce5a59d4d958782d51949365779c046d44" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal ReplicaSet.apps production-ui-query-775d68b5b6 SuccessfulCreate Created pod: production-ui-query-775d68b5b6-5l2xs replicaset-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:34 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-775d68b5b6 to 1 deployment-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:35 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:35 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:49 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:49 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:49 +0000 UTC Normal Pod production-ui-query-775d68b5b6-5l2xs.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:49 +0000 UTC Normal ReplicaSet.apps production-ui-query-775d68b5b6 
SuccessfulDelete Deleted pod: production-ui-query-775d68b5b6-5l2xs replicaset-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:49 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-775d68b5b6 to 0 from 1 deployment-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr Binding Scheduled Successfully assigned kuttl-test-gentle-ray/production-ui-query-69479f7bdf-vf8hr to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr AddedInterface Add eth0 [10.131.0.102/23] from ovn-kubernetes logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal ReplicaSet.apps production-ui-query-69479f7bdf SuccessfulCreate Created pod: production-ui-query-69479f7bdf-vf8hr replicaset-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:48:50 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-69479f7bdf to 1 deployment-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:49:00 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:49:00 +0000 UTC Normal Pod production-ui-query-69479f7bdf-vf8hr.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:49:00 +0000 UTC Normal ReplicaSet.apps production-ui-query-69479f7bdf SuccessfulDelete Deleted pod: production-ui-query-69479f7bdf-vf8hr replicaset-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:49:00 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-69479f7bdf to 0 from 1 deployment-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:49:01 +0000 UTC Normal Pod production-ui-query-5c9fc79fd6-7mh4m Binding Scheduled Successfully 
assigned kuttl-test-gentle-ray/production-ui-query-5c9fc79fd6-7mh4m to ip-10-0-111-85.ec2.internal default-scheduler logger.go:42: 07:49:08 | production | 2023-09-25 07:49:01 +0000 UTC Normal Pod production-ui-query-5c9fc79fd6-7mh4m AddedInterface Add eth0 [10.131.0.103/23] from ovn-kubernetes logger.go:42: 07:49:08 | production | 2023-09-25 07:49:01 +0000 UTC Normal Pod production-ui-query-5c9fc79fd6-7mh4m.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:49:01 +0000 UTC Normal Pod production-ui-query-5c9fc79fd6-7mh4m.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:49:01 +0000 UTC Normal Pod production-ui-query-5c9fc79fd6-7mh4m.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:49:01 +0000 UTC Normal Pod production-ui-query-5c9fc79fd6-7mh4m.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:49:01 +0000 UTC Normal ReplicaSet.apps production-ui-query-5c9fc79fd6 SuccessfulCreate Created pod: production-ui-query-5c9fc79fd6-7mh4m replicaset-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:49:01 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-5c9fc79fd6 to 1 deployment-controller logger.go:42: 07:49:08 | production | 2023-09-25 07:49:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:49:08 | production | 2023-09-25 07:49:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod production-ui-collector-7696dc959d-7sqcf horizontal-pod-autoscaler logger.go:42: 07:49:08 | production | 2023-09-25 07:49:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:49:08 | production | 2023-09-25 07:49:02 +0000 UTC Normal Pod production-ui-query-5c9fc79fd6-7mh4m.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:49:08 | production | 2023-09-25 07:49:02 +0000 UTC Normal Pod production-ui-query-5c9fc79fd6-7mh4m.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:49:08 | production | Deleting namespace: kuttl-test-gentle-ray === CONT kuttl/harness/artifacts logger.go:42: 07:49:14 | artifacts | Creating namespace: kuttl-test-cuddly-haddock logger.go:42: 07:49:14 | artifacts | artifacts events from ns kuttl-test-cuddly-haddock: logger.go:42: 07:49:14 | artifacts | Deleting namespace: kuttl-test-cuddly-haddock === CONT kuttl harness.go:405: run tests finished 
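The HorizontalPodAutoscaler warnings in the event dump above are typical of a fresh rollout: utilization cannot be computed until the metrics API has samples for the targeted pods, and the "missing request for memory in container jaeger-collector" variant is the autoscaler pointing out that the utilization math requires resource requests on the container. If such warnings persisted, declaring requests on the collector in the Jaeger CR would be the usual remedy; a sketch with hypothetical values:

    kubectl apply -n "$NAMESPACE" -f - <<'EOF'
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: production-ui
    spec:
      collector:
        resources:
          requests:
            cpu: 100m
            memory: 128Mi
    EOF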
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (165.18s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/allinone (68.39s)
        --- PASS: kuttl/harness/production (90.85s)
        --- PASS: kuttl/harness/artifacts (5.90s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml
time="2023-09-25T07:49:20Z" level=debug msg="Setting a new name for the test suites"
time="2023-09-25T07:49:20Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-09-25T07:49:20Z" level=debug msg="normalizing test case names"
time="2023-09-25T07:49:20Z" level=debug msg="ui/allinone -> ui_allinone"
time="2023-09-25T07:49:20Z" level=debug msg="ui/production -> ui_production"
time="2023-09-25T07:49:20Z" level=debug msg="ui/artifacts -> ui_artifacts"
+---------------+--------+
|     NAME      | RESULT |
+---------------+--------+
| ui_allinone   | passed |
| ui_production | passed |
| ui_artifacts  | passed |
+---------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 2 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/ui.xml
+ '[' 0 -gt 0 ']'
+ '[' 1 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=upgrade
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/upgrade.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-upgrade
make[2]: Entering directory '/tmp/jaeger-tests'
make docker JAEGER_VERSION=1.49.1 IMG="quay.io//jaeger-operator:next"
make[3]: Entering directory '/tmp/jaeger-tests'
[ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.49.0" --build-arg=JAEGER_VERSION=1.49.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2023-09-25T07:49:21Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" .
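Worth noting how the wrapper decides overall success, visible in the counting loop traced after the ui results table above: each suite's junit report is grepped for 'failure message', count goes up once per suite that has any failure (only streaming.xml here), and the whole job fails only if more than three suites failed. That is why the run exits 0 despite the two streaming test failures. Condensed, the gating logic traced above is:

    # Condensed form of the gating loop as traced in this log; the >3
    # threshold is read off the '[ 1 -gt 3 ]' comparison in the trace.
    count=0
    for file in "$ARTIFACT_DIR"/*; do
      if [ "$(grep -c 'failure message' "$file")" -gt 0 ]; then
        count=$((count + 1))
      fi
    done
    if [ "$count" -gt 3 ]; then
      exit 1
    fi
    exit 0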
make[3]: Leaving directory '/tmp/jaeger-tests'
touch build-e2e-upgrade-image
SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.49.0" JAEGER_OPERATOR_VERSION="1.49.0" JAEGER_VERSION="1.49.0" ./tests/e2e/upgrade/render.sh
+++ kubectl get clusterversion
++ output='NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.14.0-0.nightly-2023-09-24-044110   True        False         68m     Cluster version is 4.14.0-0.nightly-2023-09-24-044110'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.14.0-0.nightly-2023-09-24-044110   True        False         68m     Cluster version is 4.14.0-0.nightly-2023-09-24-044110' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/upgrade/render.sh
++ export SUITE_DIR=./tests/e2e/upgrade
++ SUITE_DIR=./tests/e2e/upgrade
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/upgrade
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ export JAEGER_NAME
+ '[' true = true ']'
+ skip_test upgrade 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade
+ warning 'upgrade: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m'
WAR: upgrade: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test upgrade-from-latest-release 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade-from-latest-release
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade-from-latest-release
+ warning 'upgrade-from-latest-release: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m'
WAR: upgrade-from-latest-release: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running upgrade E2E tests'
Running upgrade E2E tests
+ cd tests/e2e/upgrade/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-2270117936
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-kw6980sb-a5461.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 420 seconds for each step
    harness.go:372: testsuite: . has 1 tests
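The skip_test traces in this render step explain why the upgrade suite ends up with a single test: both rendered upgrade tests are deleted because they are unsupported on OpenShift, leaving only the synthetic artifacts test for kuttl to run. Pieced together from the trace (an argument-count check, a guard that the working directory is really _build, rm -rf of the rendered test folder, then a yellow WAR message), the helper appears to behave roughly like the sketch below; this is inferred from the trace, not the actual helper source:

    # Inferred from the trace above; not the real implementation.
    skip_test() {
        # Exactly two arguments: the test name and the reason to show.
        [ $# -ne 2 ] && { echo "skip_test expects 2 arguments" >&2; return 1; }
        test_name=$1
        message=$2
        # Only prune tests when invoked from inside the rendered _build dir.
        [ "$(basename "$(pwd)")" != "_build" ] && return 1
        rm -rf "$test_name"
        echo -e "\e[1;33mWAR: ${test_name}: ${message}\e[0m"
    }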
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT  kuttl/harness/artifacts
logger.go:42: 07:49:22 | artifacts | Creating namespace: kuttl-test-profound-pony
logger.go:42: 07:49:22 | artifacts | artifacts events from ns kuttl-test-profound-pony:
logger.go:42: 07:49:22 | artifacts | Deleting namespace: kuttl-test-profound-pony
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (5.80s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.75s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2023-09-25T07:49:28Z" level=debug msg="Setting a new name for the test suites"
time="2023-09-25T07:49:28Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-09-25T07:49:28Z" level=debug msg="normalizing test case names"
time="2023-09-25T07:49:28Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 2 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/ui.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/upgrade.xml
+ '[' 0 -gt 0 ']'
+ '[' 1 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
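The closing loop is the gate for the whole run: each JUnit report under $ARTIFACT_DIR is grepped for 'failure message', every report containing at least one match bumps a counter, and judging by the final '[ 1 -gt 3 ]' test the job only fails once more than three suites contain failures. In this run only streaming.xml had failures (2 matches), so the counter ends at 1 and the script exits 0. A standalone sketch of that logic; the threshold and report paths mirror the trace, and anything beyond ARTIFACT_DIR and count is an illustrative name:

    # Count suites whose JUnit report contains at least one failure and
    # fail the job only when more than 3 suites are affected, as in the
    # trace above.
    ARTIFACT_DIR=${ARTIFACT_DIR:-/logs/artifacts}
    count=0
    for file in "$ARTIFACT_DIR"/*; do
        failures=$(grep -c 'failure message' "$file")
        if [ "$failures" -gt 0 ]; then
            count=$((count + 1))
        fi
    done
    if [ "$count" -gt 3 ]; then
        echo "Too many test suites failed: $count" >&2
        exit 1
    fi
    exit 0

A tolerance of three suites means a single flaky suite, like streaming here, does not fail the job; lowering the threshold trades that resilience for stricter gating.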