Installing kuttl Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64 KUBECONFIG file is: /tmp/kubeconfig-3595368357 for suite in elasticsearch examples generate miscellaneous sidecar streaming ui upgrade; do \ make run-e2e-tests-$suite ; \ done make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true + '[' 3 -ne 3 ']' + test_suite_name=elasticsearch + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/elasticsearch.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-elasticsearch make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true \ KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ ./tests/e2e/elasticsearch/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 6m44s Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 6m44s Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/elasticsearch/render.sh ++ export SUITE_DIR=./tests/e2e/elasticsearch ++ SUITE_DIR=./tests/e2e/elasticsearch ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + start_test es-from-aio-to-production + '[' 1 -ne 1 ']' + test_name=es-from-aio-to-production + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-from-aio-to-production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m' Rendering files for test es-from-aio-to-production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-from-aio-to-production + cd es-from-aio-to-production + jaeger_name=my-jaeger + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 03 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=03 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i 
'.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml + render_smoke_test my-jaeger true 04 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test es-increasing-replicas + '[' 1 -ne 1 ']' + test_name=es-increasing-replicas + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-increasing-replicas' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m' Rendering files for test es-increasing-replicas + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production + '[' es-from-aio-to-production '!=' _build ']' + cd .. + mkdir -p es-increasing-replicas + cd es-increasing-replicas + jaeger_name=simple-prod + '[' true = true ']' + jaeger_deployment_mode=production_autoprovisioned + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml + cp ./01-assert.yaml ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml + render_smoke_test simple-prod true 03 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=03 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + 
JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + cp ./02-install.yaml ./04-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml + /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml + '[' true = true ']' + skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas + '[' es-increasing-replicas '!=' _build ']' + cd .. + rm -rf es-index-cleaner-upstream + warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_index_cleaner -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-index-cleaner-autoprov + '[' 1 -ne 1 ']' + test_name=es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-index-cleaner-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m' Rendering files for test es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-index-cleaner-autoprov + cd es-index-cleaner-autoprov + jaeger_name=test-es-index-cleaner-with-prefix + cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md . 
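The es-increasing-replicas steps traced above are generated rather than hand-written: each later kuttl step is derived by copying the previously rendered manifest and patching individual fields in place with yq (v4 "e -i" syntax; the trace shows yq 4.20.2 installed). A condensed sketch of that copy-and-patch pattern, assuming a rendered 01-install.yaml/01-assert.yaml pair:

  # Derive step 02 from step 01: same Jaeger CR, scaled to two replicas.
  cp ./01-install.yaml ./02-install.yaml
  yq e -i '.spec.collector.replicas = 2' ./02-install.yaml
  yq e -i '.spec.query.replicas = 2' ./02-install.yaml
  # The matching assert checks the Deployments actually reach two ready replicas.
  cp ./01-assert.yaml ./02-assert.yaml
  yq e -i '.spec.replicas = 2' ./02-assert.yaml
  yq e -i '.status.readyReplicas = 2' ./02-assert.yaml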
+ render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml + render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02 + '[' 6 -ne 6 ']' + jaeger=test-es-index-cleaner-with-prefix + is_secured=true + number_of_spans=5 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=5 + DAYS=5 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + sed 's~enabled: false~enabled: true~gi' ./01-install.yaml + CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml + /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=00 + test_step=06 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', 
'\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=test-es-index-cleaner-with-prefix-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.7.6 ++ version_ge 5.7.6 5.4 +++ echo 5.7.6 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.7.6 == 5.7.6 + '[' -n '' ']' + skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov + '[' es-index-cleaner-autoprov '!=' _build ']' + cd .. 
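The version gate traced above comes from two small helpers: get_elasticsearch_openshift_operator_version reads the OLM properties annotation off the elasticsearch-operator pod and extracts the package version with yq, and version_ge compares version strings by letting sort -V order them. A reconstruction from the trace (the kubectl and yq invocations are verbatim; the function wrappers are an approximation):

  get_elasticsearch_openshift_operator_version() {
    # The OLM annotation is a JSON document; pick the packaged version out of it.
    local properties
    properties=$(kubectl get pods -l name=elasticsearch-operator --all-namespaces \
      -o=jsonpath='{.items[0].metadata.annotations.operatorframework\.io/properties}')
    ESO_OPERATOR_VERSION=$(echo "$properties" \
      | yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version')
    export ESO_OPERATOR_VERSION
  }

  version_ge() {
    # True when $1 >= $2: sort both versions descending and check $1 comes out first.
    test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" == "$1"
  }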
+ rm -rf es-index-cleaner-managed + warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + start_test es-multiinstance + '[' 1 -ne 1 ']' + test_name=es-multiinstance + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-multiinstance' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m' Rendering files for test es-multiinstance + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-multiinstance + cd es-multiinstance + jaeger_name=instance-1 + render_install_jaeger instance-1 production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=instance-1 + JAEGER_NAME=instance-1 + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml + '[' true = true ']' + skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-rollover-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance + '[' es-multiinstance '!=' _build ']' + cd .. 
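skip_test itself, as the surrounding trace shows, simply deletes the rendered test directory so kuttl never sees it, then prints the yellow WAR line seen throughout this log. A reconstruction from the trace (the argument-count guards visible as "'[' 2 -ne 2 ']'" are omitted):

  warning() {
    # Bold yellow prefix, matching the WAR: lines in this log.
    echo -e "\e[1;33mWAR: $1\e[0m"
  }

  skip_test() {
    local test_name=$1 message=$2
    # Return to the _build directory before removing the rendered test.
    if [ "$(basename "$(pwd)")" != "_build" ]; then cd ..; fi
    rm -rf "$test_name"
    warning "$test_name: $message"
  }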
+ rm -rf es-rollover-upstream + warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_rollover -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-rollover-autoprov + '[' 1 -ne 1 ']' + test_name=es-rollover-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-rollover-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m' Rendering files for test es-rollover-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-rollover-autoprov + cd es-rollover-autoprov + cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md . + jaeger_name=my-jaeger + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_report_spans my-jaeger true 2 00 true 02 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset 
JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=00 + test_step=03 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=01 + test_step=04 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=01 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml + JOB_NUMBER=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml + render_report_spans my-jaeger true 2 02 true 06 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=02 + ensure_reported_spans=true + test_step=06 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=02 + JOB_NUMBER=02 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export 
JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=02 + test_step=07 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=02 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml + JOB_NUMBER=02 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + job_number=03 + test_step=08 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=03 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml + JOB_NUMBER=03 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + job_number=04 + 
test_step=09 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=04 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml + JOB_NUMBER=04 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml + render_report_spans my-jaeger true 2 03 true 10 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=03 + ensure_reported_spans=true + test_step=10 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=03 + JOB_NUMBER=03 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + CRONJOB_NAME=my-jaeger-es-rollover + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'',' + job_number=05 + test_step=11 + escape_command ''\''--name'\'', '\''jaeger-span-000002'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-000002'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-000002'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=05 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml + JOB_NUMBER=05 + /tmp/jaeger-tests/bin/gomplate -f 
/tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + job_number=06 + test_step=12 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=06 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml + JOB_NUMBER=06 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.7.6 ++ version_ge 5.7.6 5.4 +++ echo 5.7.6 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.7.6 == 5.7.6 + '[' -n '' ']' + skip_test es-rollover-managed 'Test only supported with Elasticsearch OpenShift 
Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-rollover-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov + '[' es-rollover-autoprov '!=' _build ']' + cd .. + rm -rf es-rollover-managed + warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + skip_test es-spark-dependencies 'This test is not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=es-spark-dependencies + message='This test is not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + rm -rf es-spark-dependencies + warning 'es-spark-dependencies: This test is not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m' WAR: es-spark-dependencies: This test is not supported in OpenShift + [[ true = true ]] + [[ false = false ]] + start_test es-streaming-autoprovisioned + '[' 1 -ne 1 ']' + test_name=es-streaming-autoprovisioned + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-streaming-autoprovisioned' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-streaming-autoprovisioned\e[0m' Rendering files for test es-streaming-autoprovisioned + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-streaming-autoprovisioned + cd es-streaming-autoprovisioned + jaeger_name=auto-provisioned + render_assert_kafka true auto-provisioned 00 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=00 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + 
CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_smoke_test auto-provisioned true 04 + '[' 3 -ne 3 ']' + jaeger=auto-provisioned + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 + export JAEGER_NAME=auto-provisioned + JAEGER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running elasticsearch E2E tests' Running elasticsearch E2E tests + cd tests/e2e/elasticsearch/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3595368357 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 8 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/es-from-aio-to-production === PAUSE kuttl/harness/es-from-aio-to-production === RUN kuttl/harness/es-increasing-replicas === PAUSE kuttl/harness/es-increasing-replicas === RUN kuttl/harness/es-index-cleaner-autoprov === PAUSE kuttl/harness/es-index-cleaner-autoprov === RUN kuttl/harness/es-multiinstance === PAUSE kuttl/harness/es-multiinstance === RUN kuttl/harness/es-rollover-autoprov === PAUSE kuttl/harness/es-rollover-autoprov === RUN kuttl/harness/es-simple-prod === PAUSE kuttl/harness/es-simple-prod === RUN kuttl/harness/es-streaming-autoprovisioned === PAUSE kuttl/harness/es-streaming-autoprovisioned === CONT kuttl/harness/artifacts logger.go:42: 12:41:29 | artifacts | Creating namespace: kuttl-test-major-vulture logger.go:42: 12:41:29 | artifacts | artifacts events from ns kuttl-test-major-vulture: logger.go:42: 12:41:29 | artifacts | Deleting namespace: kuttl-test-major-vulture === CONT kuttl/harness/es-multiinstance logger.go:42: 12:41:35 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 12:41:35 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 12:41:35 | es-multiinstance | Creating namespace: kuttl-test-moving-treefrog logger.go:42: 12:41:35 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace logger.go:42: 12:41:35 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true] logger.go:42: 12:41:36 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace 
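The two "Ignoring ..." lines just above explain why the leftover .yaml.template and README.md files never run: kuttl only treats files matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ as test steps, so only the rendered, numbered YAML files (for example 03-create-second-instance.yaml) become steps. A quick way to preview which files in a test directory kuttl will pick up, translating kuttl's Go-regexp \d to POSIX [0-9] for grep -E:

  for f in *; do
    if echo "$f" | grep -qE '^[0-9]+-[^.]+(\.yaml)?$'; then
      echo "step:    $f"
    else
      echo "ignored: $f"   # e.g. README.md, *.yaml.template
    fi
  done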
logger.go:42: 12:41:36 | es-multiinstance/1-install | starting test step 1-install logger.go:42: 12:41:36 | es-multiinstance/1-install | Jaeger:kuttl-test-moving-treefrog/instance-1 created logger.go:42: 12:42:35 | es-multiinstance/1-install | test step completed 1-install logger.go:42: 12:42:35 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace logger.go:42: 12:42:35 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test] logger.go:42: 12:42:35 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created logger.go:42: 12:42:35 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace logger.go:42: 12:42:35 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance logger.go:42: 12:42:35 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test] logger.go:42: 12:42:38 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created logger.go:42: 12:42:38 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000] logger.go:42: 12:43:32 | es-multiinstance/3-create-second-instance | assert is valid logger.go:42: 12:43:32 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance logger.go:42: 12:43:32 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets logger.go:42: 12:43:32 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1] logger.go:42: 12:43:32 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2] logger.go:42: 12:43:32 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0] logger.go:42: 12:43:32 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets logger.go:42: 12:43:32 | es-multiinstance/5-delete | starting test step 5-delete logger.go:42: 12:43:32 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false] logger.go:42: 12:43:32 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted logger.go:42: 12:43:32 | es-multiinstance/5-delete | test step completed 5-delete logger.go:42: 12:43:32 | es-multiinstance | es-multiinstance events from ns kuttl-test-moving-treefrog: logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:42 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c57956bd7 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9 replicaset-controller logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9 Scheduled Successfully assigned kuttl-test-moving-treefrog/elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9 AddedInterface Add eth0 [10.131.0.23/23] from ovn-kubernetes logger.go:42: 
12:43:32 | es-multiinstance | 2023-10-02 12:41:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9.spec.containers{elasticsearch} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:42 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmovingtreefroginstance1-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c57956bd7 to 1 deployment-controller logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" in 6.529978789s (6.529994269s including waiting) kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" in 3.638266191s (3.638276561s including waiting) kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:41:53 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:03 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmovingtreefroginstance1-1-7c579phjw9.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:14 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-kkv2z Scheduled Successfully assigned kuttl-test-moving-treefrog/instance-1-collector-5dd4d98b8-kkv2z to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:14 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-kkv2z AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:14 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-kkv2z.spec.containers{jaeger-collector} Pulling Pulling image 
"registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:14 +0000 UTC Normal ReplicaSet.apps instance-1-collector-5dd4d98b8 SuccessfulCreate Created pod: instance-1-collector-5dd4d98b8-kkv2z replicaset-controller logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:14 +0000 UTC Normal Deployment.apps instance-1-collector ScalingReplicaSet Scaled up replica set instance-1-collector-5dd4d98b8 to 1 deployment-controller logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:14 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl Scheduled Successfully assigned kuttl-test-moving-treefrog/instance-1-query-6fccb944c8-7g9cl to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:14 +0000 UTC Normal ReplicaSet.apps instance-1-query-6fccb944c8 SuccessfulCreate Created pod: instance-1-query-6fccb944c8-7g9cl replicaset-controller logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:14 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-6fccb944c8 to 1 deployment-controller logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:15 +0000 UTC Warning Pod instance-1-query-6fccb944c8-7g9cl FailedMount MountVolume.SetUp failed for volume "instance-1-ui-oauth-proxy-tls" : failed to sync secret cache: timed out waiting for the condition kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:15 +0000 UTC Warning Pod instance-1-query-6fccb944c8-7g9cl FailedMount MountVolume.SetUp failed for volume "kube-api-access-kh5j2" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:16 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl AddedInterface Add eth0 [10.128.2.17/23] from ovn-kubernetes logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:16 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:19 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" in 2.958278684s (2.958297064s including waiting) kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:19 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:19 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:19 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:19 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 
12:43:32 | es-multiinstance | 2023-10-02 12:42:19 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:19 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:22 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" in 3.293502165s (3.293510845s including waiting) kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:22 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:22 +0000 UTC Normal Pod instance-1-query-6fccb944c8-7g9cl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:29 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:29 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:29 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:34 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-kkv2z.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" in 19.989940725s (19.989954525s including waiting) kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:34 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-kkv2z.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:42:34 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-kkv2z.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:43:16 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:43:16 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in 
container jaeger-collector of Pod instance-1-collector-5dd4d98b8-kkv2z horizontal-pod-autoscaler logger.go:42: 12:43:32 | es-multiinstance | 2023-10-02 12:43:16 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 12:43:32 | es-multiinstance | Deleting namespace: kuttl-test-moving-treefrog === CONT kuttl/harness/es-streaming-autoprovisioned logger.go:42: 12:43:39 | es-streaming-autoprovisioned | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 12:43:39 | es-streaming-autoprovisioned | Creating namespace: kuttl-test-adapting-newt logger.go:42: 12:43:39 | es-streaming-autoprovisioned/0-install | starting test step 0-install logger.go:42: 12:43:39 | es-streaming-autoprovisioned/0-install | Jaeger:kuttl-test-adapting-newt/auto-provisioned created logger.go:42: 12:44:55 | es-streaming-autoprovisioned/0-install | test step completed 0-install logger.go:42: 12:44:55 | es-streaming-autoprovisioned/1- | starting test step 1- logger.go:42: 12:51:55 | es-streaming-autoprovisioned/1- | test step failed 1- case.go:364: failed in step 1- case.go:366: strimzipodsets.core.strimzi.io "auto-provisioned-kafka" not found logger.go:42: 12:51:55 | es-streaming-autoprovisioned | es-streaming-autoprovisioned events from ns kuttl-test-adapting-newt: logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:45 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-868cd6d7d7 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6 replicaset-controller logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6 Scheduled Successfully assigned kuttl-test-adapting-newt/elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6 AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6.spec.containers{proxy} Pulled Container image 
"registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:45 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-868cd6d7d7 to 1 deployment-controller logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:43:55 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:01 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestadaptingnewtautoprovisioned-1-8vrsk6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:14 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:14 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:14 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-adapting-newt/data-auto-provisioned-zookeeper-0" logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:14 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:18 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Scheduled Successfully assigned kuttl-test-adapting-newt/auto-provisioned-zookeeper-0 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:18 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-81512642-0f51-4785-99bc-965005815a2b logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:21 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-81512642-0f51-4785-99bc-965005815a2b" attachdetach-controller logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:25 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.131.0.24/23] from ovn-kubernetes logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:25 
+0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:33 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" in 8.395422878s (8.395437329s including waiting) kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:33 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | 2023-10-02 12:44:33 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 12:51:55 | es-streaming-autoprovisioned | Deleting namespace: kuttl-test-adapting-newt === CONT kuttl/harness/es-simple-prod logger.go:42: 12:52:07 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 12:52:07 | es-simple-prod | Creating namespace: kuttl-test-normal-lark logger.go:42: 12:52:07 | es-simple-prod | es-simple-prod events from ns kuttl-test-normal-lark: logger.go:42: 12:52:07 | es-simple-prod | Deleting namespace: kuttl-test-normal-lark === CONT kuttl/harness/es-rollover-autoprov logger.go:42: 12:52:13 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 12:52:13 | es-rollover-autoprov | Creating namespace: kuttl-test-lucky-airedale logger.go:42: 12:52:13 | es-rollover-autoprov/1-install | starting test step 1-install logger.go:42: 12:52:13 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-lucky-airedale/my-jaeger created logger.go:42: 12:52:52 | es-rollover-autoprov/1-install | test step completed 1-install logger.go:42: 12:52:52 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans logger.go:42: 12:52:52 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 12:52:54 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
logger.go:42: 12:53:00 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml] logger.go:42: 12:53:01 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE] logger.go:42: 12:53:02 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created logger.go:42: 12:53:28 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans logger.go:42: 12:53:28 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices logger.go:42: 12:53:28 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-lucky-airedale/00-check-indices created logger.go:42: 12:53:31 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices logger.go:42: 12:53:31 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices logger.go:42: 12:53:31 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-lucky-airedale/01-check-indices created logger.go:42: 12:53:35 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices logger.go:42: 12:53:35 | es-rollover-autoprov/5-install | starting test step 5-install logger.go:42: 12:53:35 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-lucky-airedale/my-jaeger updated logger.go:42: 12:53:58 | es-rollover-autoprov/5-install | test step completed 5-install logger.go:42: 12:53:58 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans logger.go:42: 12:53:58 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 12:54:06 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml] logger.go:42: 12:54:06 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE] logger.go:42: 12:54:06 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created logger.go:42: 12:54:30 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans logger.go:42: 12:54:30 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices logger.go:42: 12:54:31 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-lucky-airedale/02-check-indices created logger.go:42: 12:54:35 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices logger.go:42: 12:54:35 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices logger.go:42: 12:54:35 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-lucky-airedale/03-check-indices created logger.go:42: 12:54:39 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices logger.go:42: 12:54:39 | 
es-rollover-autoprov/9-check-indices | starting test step 9-check-indices logger.go:42: 12:54:39 | es-rollover-autoprov/9-check-indices | Job:kuttl-test-lucky-airedale/04-check-indices created logger.go:42: 12:54:43 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices logger.go:42: 12:54:43 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans logger.go:42: 12:54:43 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 12:54:50 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml] logger.go:42: 12:54:51 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE] logger.go:42: 12:54:51 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created logger.go:42: 12:55:15 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans logger.go:42: 12:55:15 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices logger.go:42: 12:55:15 | es-rollover-autoprov/11-check-indices | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE] logger.go:42: 12:55:26 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:55:26Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists" logger.go:42: 12:55:26 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:55:26Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 12:55:26 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:55:26Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully" logger.go:42: 12:55:26 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:55:26Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob" logger.go:42: 12:55:26 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:55:26Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 12:55:26 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:55:26Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 12:55:36 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:55:36Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 12:55:46 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:55:46Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 12:55:56 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:55:56Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 12:56:06 | es-rollover-autoprov/11-check-indices | time="2023-10-02T12:56:06Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after my-jaeger-es-rollover 40.046678191s" logger.go:42: 12:56:06 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-lucky-airedale/05-check-indices created logger.go:42: 
12:56:09 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices logger.go:42: 12:56:09 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices logger.go:42: 12:56:09 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-lucky-airedale/06-check-indices created logger.go:42: 12:56:13 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices logger.go:42: 12:56:13 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-lucky-airedale: logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bc6d5 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf replicaset-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf Scheduled Successfully assigned kuttl-test-lucky-airedale/elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf AddedInterface Add eth0 [10.131.0.26/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:20 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bc6d5 to 1 deployment-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:30 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response 
code: 000] kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:35 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestluckyairedalemyjaeger-1-79465bchkmjf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-g8d9j Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-collector-558ccfc8dd-g8d9j to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-g8d9j AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-g8d9j.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-g8d9j.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-g8d9j.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-g8d9j replicaset-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5 Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-query-696d88595-v4zw5 to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-696d88595 SuccessfulCreate Created pod: my-jaeger-query-696d88595-v4zw5 replicaset-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:47 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-696d88595 to 1 deployment-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:48 +0000 UTC Warning Pod my-jaeger-query-696d88595-v4zw5 FailedMount MountVolume.SetUp failed for volume "my-jaeger-trusted-ca" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:48 +0000 UTC Warning Pod my-jaeger-query-696d88595-v4zw5 FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : failed to sync secret cache: timed out waiting for the condition kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:48 +0000 UTC Warning Pod my-jaeger-query-696d88595-v4zw5 FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-configuration-volume" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:48 +0000 UTC Warning Pod my-jaeger-query-696d88595-v4zw5 FailedMount 
MountVolume.SetUp failed for volume "certs" : failed to sync secret cache: timed out waiting for the condition kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5 AddedInterface Add eth0 [10.128.2.21/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:49 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:56 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:56 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:56 +0000 UTC Normal Pod my-jaeger-query-696d88595-v4zw5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:56 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-696d88595 SuccessfulDelete Deleted pod: my-jaeger-query-696d88595-v4zw5 replicaset-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:56 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-696d88595 to 0 from 1 deployment-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 
12:52:57 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-query-5cbf69cdf7-6d88k to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:57 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k AddedInterface Add eth0 [10.128.2.22/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:57 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:57 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5cbf69cdf7 SuccessfulCreate Created pod: my-jaeger-query-5cbf69cdf7-6d88k replicaset-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:57 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5cbf69cdf7 to 1 deployment-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:52:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:02 +0000 UTC Normal Pod 00-report-span-xv6pn Scheduled Successfully assigned kuttl-test-lucky-airedale/00-report-span-xv6pn to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:02 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-xv6pn job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector 
FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:03 +0000 UTC Normal Pod 00-report-span-xv6pn AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:03 +0000 UTC Normal Pod 00-report-span-xv6pn.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:05 +0000 UTC Normal Pod 00-report-span-xv6pn.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" in 1.418891704s (1.418913214s including waiting) kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:05 +0000 UTC Normal Pod 00-report-span-xv6pn.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:05 +0000 UTC Normal Pod 00-report-span-xv6pn.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:27 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:28 +0000 UTC Normal Pod 00-check-indices-cpfgz Scheduled Successfully assigned kuttl-test-lucky-airedale/00-check-indices-cpfgz to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:28 +0000 UTC Normal Pod 00-check-indices-cpfgz AddedInterface Add eth0 [10.129.2.29/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:28 +0000 UTC Normal Pod 00-check-indices-cpfgz.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:28 +0000 UTC Normal Pod 00-check-indices-cpfgz.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:28 +0000 UTC Normal Pod 00-check-indices-cpfgz.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:28 +0000 UTC Normal Job.batch 00-check-indices 
SuccessfulCreate Created pod: 00-check-indices-cpfgz job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:30 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:31 +0000 UTC Normal Pod 01-check-indices-lcwnh Scheduled Successfully assigned kuttl-test-lucky-airedale/01-check-indices-lcwnh to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:31 +0000 UTC Normal Pod 01-check-indices-lcwnh AddedInterface Add eth0 [10.129.2.30/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:31 +0000 UTC Normal Pod 01-check-indices-lcwnh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:31 +0000 UTC Normal Pod 01-check-indices-lcwnh.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:31 +0000 UTC Normal Pod 01-check-indices-lcwnh.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:31 +0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-lcwnh job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:35 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:36 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ss49d Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-es-rollover-create-mapping-ss49d to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:36 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-ss49d job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:37 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ss49d AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:37 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ss49d.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-558ccfc8dd-g8d9j horizontal-pod-autoscaler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid 
out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:55 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ss49d.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" in 18.38849141s (18.3885002s including waiting) kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:55 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ss49d.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:55 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-ss49d.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:58 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-g8d9j.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:58 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulDelete Deleted pod: my-jaeger-collector-558ccfc8dd-g8d9j replicaset-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:58 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-558ccfc8dd to 0 from 1 deployment-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:58 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:58 +0000 UTC Normal Pod my-jaeger-query-5cbf69cdf7-6d88k.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:58 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5cbf69cdf7 SuccessfulDelete Deleted pod: my-jaeger-query-5cbf69cdf7-6d88k replicaset-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:58 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-5cbf69cdf7 to 0 from 1 deployment-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-gbsmj Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-collector-74dd5d98f7-gbsmj to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-gbsmj AddedInterface Add eth0 [10.129.2.32/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod 
my-jaeger-collector-74dd5d98f7-gbsmj.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-gbsmj.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-gbsmj.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-74dd5d98f7 SuccessfulCreate Created pod: my-jaeger-collector-74dd5d98f7-gbsmj replicaset-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-74dd5d98f7 to 1 deployment-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-query-5fcd7b856d-8tk2l to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l AddedInterface Add eth0 [10.128.2.23/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5fcd7b856d SuccessfulCreate Created pod: my-jaeger-query-5fcd7b856d-8tk2l replicaset-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:53:59 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5fcd7b856d to 1 deployment-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270854-lr9rz Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-es-lookback-28270854-lr9rz to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270854-lr9rz AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 
12:54:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270854-lr9rz.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270854-lr9rz.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270854-lr9rz.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28270854 SuccessfulCreate Created pod: my-jaeger-es-lookback-28270854-lr9rz job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28270854 cronjob-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270854-pmnvb Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-es-rollover-28270854-pmnvb to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270854-pmnvb AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270854-pmnvb.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270854-pmnvb.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270854-pmnvb.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28270854 SuccessfulCreate Created pod: my-jaeger-es-rollover-28270854-pmnvb job-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28270854 cronjob-controller logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 12:56:13 | 
es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:00 +0000 UTC Normal Pod my-jaeger-query-5fcd7b856d-8tk2l.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28270854 Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28270854, status: Complete cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28270854 Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28270854, status: Complete cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:06 +0000 UTC Normal Pod 02-report-span-jjqm4 Scheduled Successfully assigned kuttl-test-lucky-airedale/02-report-span-jjqm4 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:06 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-jjqm4 job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:07 +0000 UTC Normal Pod 02-report-span-jjqm4 AddedInterface Add eth0 [10.129.2.35/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:07 +0000 UTC Normal Pod 02-report-span-jjqm4.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:07 +0000 UTC Normal Pod 02-report-span-jjqm4.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:07 +0000 UTC Normal Pod 02-report-span-jjqm4.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:30 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:31 +0000 UTC Normal Pod 02-check-indices-vxlj8 Scheduled Successfully assigned kuttl-test-lucky-airedale/02-check-indices-vxlj8 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:31 +0000 UTC Normal Pod 02-check-indices-vxlj8 AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:31 +0000 UTC Normal Pod 02-check-indices-vxlj8.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:31 +0000 UTC Normal Pod 02-check-indices-vxlj8.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:31 +0000 UTC Normal Pod 02-check-indices-vxlj8.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:31 +0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-vxlj8 job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:34 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:35 +0000 UTC Normal Pod 03-check-indices-zc88b Scheduled Successfully assigned kuttl-test-lucky-airedale/03-check-indices-zc88b to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:35 +0000 UTC Normal Pod 03-check-indices-zc88b AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:35 +0000 UTC Normal Pod 03-check-indices-zc88b.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:35 +0000 UTC Normal Pod 03-check-indices-zc88b.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:35 +0000 UTC Normal Pod 03-check-indices-zc88b.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:35 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-zc88b job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:38 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:39 +0000 UTC Normal Pod 04-check-indices-6c9lr Scheduled Successfully assigned kuttl-test-lucky-airedale/04-check-indices-6c9lr to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:39 +0000 UTC Normal Pod 04-check-indices-6c9lr AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:39 +0000 UTC Normal Pod 04-check-indices-6c9lr.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:39 +0000 UTC Normal Pod 04-check-indices-6c9lr.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:39 +0000 UTC Normal Pod 04-check-indices-6c9lr.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:39 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-6c9lr job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:42 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-74dd5d98f7-gbsmj horizontal-pod-autoscaler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:51 +0000 UTC Normal Pod 03-report-span-g7w7f Scheduled Successfully assigned kuttl-test-lucky-airedale/03-report-span-g7w7f to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:51 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-g7w7f job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:52 +0000 UTC Normal Pod 03-report-span-g7w7f AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:52 +0000 UTC Normal Pod 03-report-span-g7w7f.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:52 +0000 UTC Normal Pod 03-report-span-g7w7f.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:54:52 +0000 UTC Normal Pod 03-report-span-g7w7f.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270855-pxz7n Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-es-lookback-28270855-pxz7n to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270855-pxz7n AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270855-pxz7n.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270855-pxz7n.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270855-pxz7n.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28270855 SuccessfulCreate Created pod: my-jaeger-es-lookback-28270855-pxz7n job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28270855 cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270855-l9qtd Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-es-rollover-28270855-l9qtd to ip-10-0-123-159.ec2.internal default-scheduler
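The FailedGetResourceMetric warning recorded above is the horizontal-pod-autoscaler reporting that it cannot compute memory utilization: the autoscaler targets memory, but the jaeger-collector container declares no memory request, so there is no denominator for the utilization ratio. A minimal sketch of how such a request could be supplied through the Jaeger CR follows; the patch, and the 128Mi value in it, are illustrative assumptions and not values taken from this run (the resources block follows the standard Kubernetes ResourceRequirements schema):

    # Hypothetical fix sketch: give the collector a memory request so the
    # HPA can compute memory utilization. Value chosen only for illustration.
    kubectl patch jaeger my-jaeger --type=merge \
      -p '{"spec":{"collector":{"resources":{"requests":{"memory":"128Mi"}}}}}'

With a request in place the autoscaler has something to divide observed usage by, and the warning stops recurring; this suite appears to assert only on index rollover behavior, so the warning does not fail the test.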
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270855-l9qtd AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270855-l9qtd.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270855-l9qtd.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270855-l9qtd.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28270855 SuccessfulCreate Created pod: my-jaeger-es-rollover-28270855-l9qtd job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28270855 cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28270855 Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28270855, status: Complete cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28270855 Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28270855, status: Complete cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:55:15 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270856-gdlqv Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-es-lookback-28270856-gdlqv to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270856-gdlqv AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270856-gdlqv.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270856-gdlqv.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28270856-gdlqv.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28270856 SuccessfulCreate Created pod: my-jaeger-es-lookback-28270856-gdlqv job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28270856 cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270856-6cl75 Scheduled Successfully assigned kuttl-test-lucky-airedale/my-jaeger-es-rollover-28270856-6cl75 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270856-6cl75 AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270856-6cl75.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270856-6cl75.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28270856-6cl75.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28270856 SuccessfulCreate Created pod: my-jaeger-es-rollover-28270856-6cl75 job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28270856 cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28270856 Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28270856, status: Complete cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28270856 Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28270856, status: Complete cronjob-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:06 +0000 UTC Normal Pod 05-check-indices-lh2p8 Scheduled Successfully assigned kuttl-test-lucky-airedale/05-check-indices-lh2p8 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:06 +0000 UTC Normal Pod 05-check-indices-lh2p8 AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:06 +0000 UTC Normal Pod 05-check-indices-lh2p8.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:06 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-lh2p8 job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:07 +0000 UTC Normal Pod 05-check-indices-lh2p8.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:07 +0000 UTC Normal Pod 05-check-indices-lh2p8.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:09 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:09 +0000 UTC Normal Pod 06-check-indices-s2xj7 Scheduled Successfully assigned kuttl-test-lucky-airedale/06-check-indices-s2xj7 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:09 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-s2xj7 job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:10 +0000 UTC Normal Pod 06-check-indices-s2xj7 AddedInterface Add eth0 [10.129.2.45/23] from ovn-kubernetes
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:10 +0000 UTC Normal Pod 06-check-indices-s2xj7.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:10 +0000 UTC Normal Pod 06-check-indices-s2xj7.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:10 +0000 UTC Normal Pod 06-check-indices-s2xj7.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 12:56:13 | es-rollover-autoprov | 2023-10-02 12:56:13 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller
logger.go:42: 12:56:13 | es-rollover-autoprov | Deleting namespace: kuttl-test-lucky-airedale
=== CONT kuttl/harness/es-increasing-replicas
logger.go:42: 12:56:21 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 12:56:21 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 12:56:21 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 12:56:21 | es-increasing-replicas | Creating namespace: kuttl-test-peaceful-finch
logger.go:42: 12:56:22 | es-increasing-replicas/1-install | starting test step 1-install
logger.go:42: 12:56:22 | es-increasing-replicas/1-install | Jaeger:kuttl-test-peaceful-finch/simple-prod created
logger.go:42: 12:56:59 | es-increasing-replicas/1-install | test step completed 1-install
logger.go:42: 12:56:59 | es-increasing-replicas/2-install | starting test step 2-install
logger.go:42: 12:56:59 | es-increasing-replicas/2-install | Jaeger:kuttl-test-peaceful-finch/simple-prod updated
logger.go:42: 12:57:07 | es-increasing-replicas/2-install | test step completed 2-install
logger.go:42: 12:57:07 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test
logger.go:42: 12:57:07 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 12:57:08 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 12:57:15 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 12:57:15 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 12:57:15 | es-increasing-replicas/3-smoke-test | job.batch/report-span created
logger.go:42: 12:57:15 | es-increasing-replicas/3-smoke-test | job.batch/check-span created
logger.go:42: 12:57:29 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test
logger.go:42: 12:57:29 | es-increasing-replicas/4-install | starting test step 4-install
logger.go:42: 12:57:29 | es-increasing-replicas/4-install | Jaeger:kuttl-test-peaceful-finch/simple-prod updated
logger.go:42: 12:57:29 | es-increasing-replicas/4-install | test step completed 4-install
logger.go:42: 12:57:29 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes
logger.go:42: 12:57:29 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE]
logger.go:42: 12:57:29 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 12:57:29 | es-increasing-replicas/5-check-es-nodes | false
logger.go:42: 12:57:29 | es-increasing-replicas/5-check-es-nodes | Error: no matches found
logger.go:42: 12:57:34 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 12:57:34 | es-increasing-replicas/5-check-es-nodes | true
logger.go:42: 12:57:34 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes
logger.go:42: 12:57:34 | es-increasing-replicas | es-increasing-replicas events from ns kuttl-test-peaceful-finch:
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:29 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695746b5 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg Scheduled Successfully assigned kuttl-test-peaceful-finch/elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:29 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695746b5 to 1 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:39 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:45 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-1-5c695c2zgg.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-jjw2p Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-collector-5499b86c46-jjw2p to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-jjw2p AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-jjw2p.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-jjw2p.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-jjw2p.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulCreate Created pod: simple-prod-collector-5499b86c46-jjw2p replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5499b86c46 to 1 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-query-774bb69579-9xzvh to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh AddedInterface Add eth0 [10.128.2.24/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal ReplicaSet.apps simple-prod-query-774bb69579 SuccessfulCreate Created pod: simple-prod-query-774bb69579-9xzvh replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:56 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-774bb69579 to 1 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:57 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:57 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:57 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:57 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:56:57 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:00 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-27g5s Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-collector-5499b86c46-27g5s to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:00 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulCreate Created pod: simple-prod-collector-5499b86c46-27g5s replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:00 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5499b86c46 to 2 from 1 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:00 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7 Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-query-774bb69579-lrqw7 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:00 +0000 UTC Normal ReplicaSet.apps simple-prod-query-774bb69579 SuccessfulCreate Created pod: simple-prod-query-774bb69579-lrqw7 replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:00 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-774bb69579 to 2 from 1 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:01 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-27g5s AddedInterface Add eth0 [10.128.2.25/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:01 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-27g5s.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:01 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-27g5s.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:01 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-27g5s.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:01 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7 AddedInterface Add eth0 [10.129.2.47/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:01 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:03 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" in 1.998248157s (1.998258657s including waiting) kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:03 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:03 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:03 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:03 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:03 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:03 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:05 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" in 1.755742313s (1.755758433s including waiting) kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:05 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:05 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:10 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:10 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:10 +0000 UTC Normal Pod simple-prod-query-774bb69579-9xzvh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:10 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:10 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:10 +0000 UTC Normal Pod simple-prod-query-774bb69579-lrqw7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:10 +0000 UTC Normal ReplicaSet.apps simple-prod-query-774bb69579 SuccessfulDelete Deleted pod: simple-prod-query-774bb69579-9xzvh replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:10 +0000 UTC Normal ReplicaSet.apps simple-prod-query-774bb69579 SuccessfulDelete Deleted pod: simple-prod-query-774bb69579-lrqw7 replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:10 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-774bb69579 to 0 from 2 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:11 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-query-58589dc945-5d9rh to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:11 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75 Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-query-58589dc945-xjm75 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:11 +0000 UTC Normal ReplicaSet.apps simple-prod-query-58589dc945 SuccessfulCreate Created pod: simple-prod-query-58589dc945-xjm75 replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:11 +0000 UTC Normal ReplicaSet.apps simple-prod-query-58589dc945 SuccessfulCreate Created pod: simple-prod-query-58589dc945-5d9rh replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:11 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-58589dc945 to 2 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh AddedInterface Add eth0 [10.128.2.26/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75 AddedInterface Add eth0 [10.129.2.48/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:12 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:15 +0000 UTC Normal Pod check-span-2xvvc Scheduled Successfully assigned kuttl-test-peaceful-finch/check-span-2xvvc to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:15 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-2xvvc job-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:15 +0000 UTC Normal Pod report-span-p8kjj Scheduled Successfully assigned kuttl-test-peaceful-finch/report-span-p8kjj to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:15 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-p8kjj job-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:16 +0000 UTC Normal Pod check-span-2xvvc AddedInterface Add eth0 [10.128.2.27/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:16 +0000 UTC Normal Pod check-span-2xvvc.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:16 +0000 UTC Normal Pod report-span-p8kjj AddedInterface Add eth0 [10.129.2.49/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:16 +0000 UTC Normal Pod report-span-p8kjj.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:16 +0000 UTC Normal Pod report-span-p8kjj.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:16 +0000 UTC Normal Pod report-span-p8kjj.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:17 +0000 UTC Normal Pod check-span-2xvvc.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" in 1.495997717s (1.496014678s including waiting) kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:18 +0000 UTC Normal Pod check-span-2xvvc.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:18 +0000 UTC Normal Pod check-span-2xvvc.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:29 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7 Scheduled Successfully assigned kuttl-test-peaceful-finch/elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7 AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9cddfb9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7 replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9cddfb9 to 1 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-27g5s.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-jjw2p.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulDelete Deleted pod: simple-prod-collector-5499b86c46-jjw2p replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulDelete Deleted pod: simple-prod-collector-5499b86c46-27g5s replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-5499b86c46 to 0 from 2 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod simple-prod-query-58589dc945-5d9rh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Pod simple-prod-query-58589dc945-xjm75.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal ReplicaSet.apps simple-prod-query-58589dc945 SuccessfulDelete Deleted pod: simple-prod-query-58589dc945-5d9rh replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal ReplicaSet.apps simple-prod-query-58589dc945 SuccessfulDelete Deleted pod: simple-prod-query-58589dc945-xjm75 replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-58589dc945 to 0 from 2 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:30 +0000 UTC Warning Endpoints simple-prod-query FailedToUpdateEndpoint Failed to update endpoint kuttl-test-peaceful-finch/simple-prod-query: Operation cannot be fulfilled on endpoints "simple-prod-query": the object has been modified; please apply your changes to the latest version and try again endpoint-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpeacefulfinchsimpleprod-2-57f9c294j7.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-7dkt5 Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-collector-55656dcb65-7dkt5 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-fgdds Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-collector-55656dcb65-fgdds to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-55656dcb65 SuccessfulCreate Created pod: simple-prod-collector-55656dcb65-fgdds replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-55656dcb65 SuccessfulCreate Created pod: simple-prod-collector-55656dcb65-7dkt5 replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-55656dcb65 to 2 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-query-56774dcfc4-6srdd to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc Scheduled Successfully assigned kuttl-test-peaceful-finch/simple-prod-query-56774dcfc4-kn2kc to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal ReplicaSet.apps simple-prod-query-56774dcfc4 SuccessfulCreate Created pod: simple-prod-query-56774dcfc4-kn2kc replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal ReplicaSet.apps simple-prod-query-56774dcfc4 SuccessfulCreate Created pod: simple-prod-query-56774dcfc4-6srdd replicaset-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:31 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-56774dcfc4 to 2 deployment-controller
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-7dkt5 AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-7dkt5.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-7dkt5.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-7dkt5.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-fgdds AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-fgdds.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-fgdds.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-fgdds.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:32 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-kn2kc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:33 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | 2023-10-02 12:57:33 +0000 UTC Normal Pod simple-prod-query-56774dcfc4-6srdd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 12:57:34 | es-increasing-replicas | Deleting namespace: kuttl-test-peaceful-finch
=== CONT kuttl/harness/es-index-cleaner-autoprov
logger.go:42: 12:58:10 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 12:58:10 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-touching-sunbeam
logger.go:42: 12:58:10 | es-index-cleaner-autoprov/1-install | starting test step 1-install
logger.go:42: 12:58:10 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-touching-sunbeam/test-es-index-cleaner-with-prefix created
logger.go:42: 12:58:46 | es-index-cleaner-autoprov/1-install | test step completed 1-install
logger.go:42: 12:58:46 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 12:58:46 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE test-es-index-cleaner-with-prefix /dev/null]
logger.go:42: 12:58:47 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
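The report-spans step that follows uses the same render-and-apply pattern seen throughout these suites: environment variables are passed to gomplate, which renders a Job manifest from a template, the Job is applied with kubectl, and the harness then waits for it to complete. A condensed sketch of that flow, under the assumption that the template and variable names match what the log shows (paths shortened for readability):

    # Sketch of the render-and-apply pattern; endpoints and names are taken
    # from the log above, paths are assumed relative for illustration.
    JOB_NUMBER=00 DAYS=5 \
    JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query \
    MOUNT_SECRET=e2e-test \
    gomplate -f report-spans.yaml.template -o report-span-00-job.yaml
    kubectl apply -f report-span-00-job.yaml -n "$NAMESPACE"
    # The harness asserts on Job completion; an equivalent manual check:
    kubectl wait --for=condition=complete job/00-report-span -n "$NAMESPACE" --timeout=5m

Keeping the rendering in gomplate means one template serves every suite, with only the endpoints and job numbers varying per test, which is why the same template paths recur across the log.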
logger.go:42: 12:58:53 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml] logger.go:42: 12:58:54 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE] logger.go:42: 12:58:54 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created logger.go:42: 12:59:33 | es-index-cleaner-autoprov/2-report-spans | test step completed 2-report-spans logger.go:42: 12:59:33 | es-index-cleaner-autoprov/3-install | starting test step 3-install logger.go:42: 12:59:33 | es-index-cleaner-autoprov/3-install | Jaeger:kuttl-test-touching-sunbeam/test-es-index-cleaner-with-prefix updated logger.go:42: 12:59:33 | es-index-cleaner-autoprov/3-install | test step completed 3-install logger.go:42: 12:59:33 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | starting test step 4-wait-es-index-cleaner logger.go:42: 12:59:33 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob test-es-index-cleaner-with-prefix-es-index-cleaner --namespace $NAMESPACE] logger.go:42: 12:59:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T12:59:35Z" level=debug msg="Checking if the test-es-index-cleaner-with-prefix-es-index-cleaner CronJob exists" logger.go:42: 12:59:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T12:59:35Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 12:59:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T12:59:35Z" level=info msg="Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner found successfully" logger.go:42: 12:59:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T12:59:35Z" level=debug msg="Waiting for the next scheduled job from test-es-index-cleaner-with-prefix-es-index-cleaner cronjob" logger.go:42: 12:59:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T12:59:35Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 12:59:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T12:59:35Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 12:59:45 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T12:59:45Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 12:59:55 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T12:59:55Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 13:00:05 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T13:00:05Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 13:00:15 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-02T13:00:15Z" level=info 
msg="Job of owner test-es-index-cleaner-with-prefix-es-index-cleaner succeeded after test-es-index-cleaner-with-prefix-es-index-cleaner 40.02438424s" logger.go:42: 13:00:15 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | test step completed 4-wait-es-index-cleaner logger.go:42: 13:00:15 | es-index-cleaner-autoprov/5-install | starting test step 5-install logger.go:42: 13:00:15 | es-index-cleaner-autoprov/5-install | Jaeger:kuttl-test-touching-sunbeam/test-es-index-cleaner-with-prefix updated logger.go:42: 13:00:15 | es-index-cleaner-autoprov/5-install | test step completed 5-install logger.go:42: 13:00:15 | es-index-cleaner-autoprov/6-check-indices | starting test step 6-check-indices logger.go:42: 13:00:15 | es-index-cleaner-autoprov/6-check-indices | Job:kuttl-test-touching-sunbeam/00-check-indices created logger.go:42: 13:00:19 | es-index-cleaner-autoprov/6-check-indices | test step completed 6-check-indices logger.go:42: 13:00:19 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-touching-sunbeam: logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:16 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-67c7b95f5 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs replicaset-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs Scheduled Successfully assigned kuttl-test-touching-sunbeam/elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:16 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-67c7b95f5 to 1 deployment-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs AddedInterface Add eth0 [10.131.0.28/23] from ovn-kubernetes logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:17 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:27 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesttouchingsunbeamtestesindexc-1-6qr9cs.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-ldwc6 Scheduled Successfully assigned kuttl-test-touching-sunbeam/test-es-index-cleaner-with-prefix-collector-8659b69c48-ldwc6 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:43 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-8659b69c48 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-8659b69c48-ldwc6 replicaset-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:43 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-8659b69c48 to 1 deployment-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7 Scheduled Successfully assigned kuttl-test-touching-sunbeam/test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:43 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-5f7449bff7 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7 replicaset-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:43 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-5f7449bff7 to 1 deployment-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-ldwc6 AddedInterface Add eth0 [10.129.2.53/23] from ovn-kubernetes logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-ldwc6.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-ldwc6.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod 
test-es-index-cleaner-with-prefix-collector-8659b69c48-ldwc6.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7 AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg Scheduled Successfully assigned kuttl-test-touching-sunbeam/test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:50 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-5df66dccd6 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg replicaset-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:50 +0000 UTC Normal Pod 
test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:50 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-5f7449bff7 SuccessfulDelete Deleted pod: test-es-index-cleaner-with-prefix-query-5f7449bff7-l2pj7 replicaset-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:50 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled down replica set test-es-index-cleaner-with-prefix-query-5f7449bff7 to 0 from 1 deployment-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:50 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-5df66dccd6 to 1 deployment-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg AddedInterface Add eth0 [10.128.2.30/23] from ovn-kubernetes logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" 
already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5df66dccd6-ckqwg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:54 +0000 UTC Normal Pod 00-report-span-bhg29 Scheduled Successfully assigned kuttl-test-touching-sunbeam/00-report-span-bhg29 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:54 +0000 UTC Normal Pod 00-report-span-bhg29 AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:54 +0000 UTC Normal Pod 00-report-span-bhg29.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:54 +0000 UTC Normal Pod 00-report-span-bhg29.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:54 +0000 UTC Normal Pod 00-report-span-bhg29.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:54 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-bhg29 job-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:58:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:59:33 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:59:44 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:59:44 +0000 UTC Warning 
HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-ldwc6 horizontal-pod-autoscaler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 12:59:44 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28270860 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-2827086xkhlf job-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2827086xkhlf Scheduled Successfully assigned kuttl-test-touching-sunbeam/test-es-index-cleaner-with-prefix-es-index-cleaner-2827086xkhlf to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2827086xkhlf AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2827086xkhlf.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:8ac1b958ff16ea16f4d0c7132e3d369848a829d6655e0b2338a9bef93d54f02d" kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-28270860 cronjob-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:05 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2827086xkhlf.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:8ac1b958ff16ea16f4d0c7132e3d369848a829d6655e0b2338a9bef93d54f02d" in 4.744578678s (4.744592698s including waiting) kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:05 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2827086xkhlf.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:05 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2827086xkhlf.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:08 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28270860 Completed Job completed job-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:08 +0000 UTC Normal CronJob.batch 
test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-28270860, status: Complete cronjob-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:15 +0000 UTC Normal Pod 00-check-indices-vd86q Scheduled Successfully assigned kuttl-test-touching-sunbeam/00-check-indices-vd86q to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:15 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-vd86q job-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:16 +0000 UTC Normal Pod 00-check-indices-vd86q AddedInterface Add eth0 [10.129.2.58/23] from ovn-kubernetes logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:16 +0000 UTC Normal Pod 00-check-indices-vd86q.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:16 +0000 UTC Normal Pod 00-check-indices-vd86q.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:16 +0000 UTC Normal Pod 00-check-indices-vd86q.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:00:19 | es-index-cleaner-autoprov | 2023-10-02 13:00:19 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 13:00:19 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-touching-sunbeam === CONT kuttl/harness/es-from-aio-to-production logger.go:42: 13:00:26 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:00:26 | es-from-aio-to-production | Creating namespace: kuttl-test-exact-elk logger.go:42: 13:00:26 | es-from-aio-to-production/0-install | starting test step 0-install logger.go:42: 13:00:26 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-exact-elk/my-jaeger created logger.go:42: 13:00:36 | es-from-aio-to-production/0-install | test step completed 0-install logger.go:42: 13:00:36 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test logger.go:42: 13:00:36 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:00:38 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
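A note on the FailedGetResourceMetric / FailedComputeMetricsReplicas warnings in the es-index-cleaner-autoprov events above: they are expected while the collector pod has produced no metrics yet, and the "missing request for memory in container jaeger-collector" message shows the HPA also has no resource requests to divide by. A sketch of the usual fix, assuming the standard spec.collector.resources field of the Jaeger CR (values illustrative, not from this run):

    # Merge-patch the Jaeger CR so the collector declares requests the HPA can compute utilization against.
    kubectl patch jaeger test-es-index-cleaner-with-prefix -n kuttl-test-touching-sunbeam \
      --type=merge \
      -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'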
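The smoke-test steps here and below call get-token.sh with SERVICE_ACCOUNT_NAME=e2e-test first because, on OpenShift, the query service sits behind oauth-proxy and JAEGER_QUERY_ENDPOINT is the secured https://...:443 address. get-token.sh itself is not shown in this log; a minimal stand-in, assuming a kubectl >= 1.24 client and a caller (like the in-cluster check-span job) for which the service name resolves:

    # Hypothetical equivalent of get-token.sh: mint a token for the e2e-test ServiceAccount...
    TOKEN=$(kubectl create token e2e-test -n kuttl-test-exact-elk)
    # ...and present it to the oauth-proxy-protected Jaeger query API.
    curl -k -H "Authorization: Bearer $TOKEN" https://my-jaeger-query:443/api/services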
logger.go:42: 13:00:45 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:00:45 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:00:45 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created logger.go:42: 13:00:45 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created logger.go:42: 13:00:58 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test logger.go:42: 13:00:58 | es-from-aio-to-production/3-install | starting test step 3-install logger.go:42: 13:00:58 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-exact-elk/my-jaeger updated logger.go:42: 13:01:31 | es-from-aio-to-production/3-install | test step completed 3-install logger.go:42: 13:01:31 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test logger.go:42: 13:01:31 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:01:38 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:01:39 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:01:39 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged logger.go:42: 13:01:39 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged logger.go:42: 13:01:39 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test logger.go:42: 13:01:39 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-exact-elk: logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:30 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld Scheduled Successfully assigned kuttl-test-exact-elk/my-jaeger-6587c58547-ftbld to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:30 +0000 UTC Normal ReplicaSet.apps my-jaeger-6587c58547 SuccessfulCreate Created pod: my-jaeger-6587c58547-ftbld replicaset-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:30 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6587c58547 to 1 deployment-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:31 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:31 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld.spec.containers{jaeger} Pulling Pulling image 
"registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:34 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" in 3.306311747s (3.306323737s including waiting) kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:34 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:34 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:34 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:34 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:34 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:39 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:39 +0000 UTC Normal Pod my-jaeger-6587c58547-ftbld.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:39 +0000 UTC Normal ReplicaSet.apps my-jaeger-6587c58547 SuccessfulDelete Deleted pod: my-jaeger-6587c58547-ftbld replicaset-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:39 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-6587c58547 to 0 from 1 deployment-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:40 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8 Scheduled Successfully assigned kuttl-test-exact-elk/my-jaeger-dfdb8d564-svbz8 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:40 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8 AddedInterface Add eth0 [10.131.0.29/23] from ovn-kubernetes logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:40 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:40 +0000 UTC Normal ReplicaSet.apps my-jaeger-dfdb8d564 SuccessfulCreate Created pod: my-jaeger-dfdb8d564-svbz8 replicaset-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:40 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-dfdb8d564 to 1 deployment-controller logger.go:42: 13:01:39 | 
es-from-aio-to-production | 2023-10-02 13:00:43 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" in 2.551777425s (2.551791255s including waiting) kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:43 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:43 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:43 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:43 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:43 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:45 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-lzzvr job-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:45 +0000 UTC Normal Pod report-span-7djp6 Scheduled Successfully assigned kuttl-test-exact-elk/report-span-7djp6 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:45 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-7djp6 job-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:46 +0000 UTC Normal Pod check-span-lzzvr Scheduled Successfully assigned kuttl-test-exact-elk/check-span-lzzvr to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:46 +0000 UTC Normal Pod check-span-lzzvr AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:46 +0000 UTC Normal Pod check-span-lzzvr.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:46 +0000 UTC Normal Pod check-span-lzzvr.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:46 +0000 UTC Normal Pod check-span-lzzvr.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:46 +0000 UTC Normal Pod report-span-7djp6 AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:46 +0000 UTC Normal Pod report-span-7djp6.spec.containers{report-span} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:46 +0000 UTC Normal Pod report-span-7djp6.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:46 +0000 UTC Normal Pod report-span-7djp6.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:00:57 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf Scheduled Successfully assigned kuttl-test-exact-elk/elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:01 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf replicaset-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:01 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestexactelkmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f to 1 deployment-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf AddedInterface Add eth0 [10.129.2.61/23] from ovn-kubernetes logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:12 +0000 UTC Warning Pod 
elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:17 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestexactelkmyjaeger-1-56b996dc4f-hhxkf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:19 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2tqr6 Scheduled Successfully assigned kuttl-test-exact-elk/my-jaeger-collector-558ccfc8dd-2tqr6 to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2tqr6 AddedInterface Add eth0 [10.128.2.32/23] from ovn-kubernetes logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2tqr6.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2tqr6.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2tqr6.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-2tqr6 replicaset-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl Scheduled Successfully assigned kuttl-test-exact-elk/my-jaeger-query-f6478878-qqvvl to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-f6478878 SuccessfulCreate Created pod: my-jaeger-query-f6478878-qqvvl replicaset-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:28 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-f6478878 to 1 deployment-controller logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 
13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:29 +0000 UTC Normal Pod my-jaeger-query-f6478878-qqvvl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:31 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | 2023-10-02 13:01:31 +0000 UTC Normal Pod my-jaeger-dfdb8d564-svbz8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:01:39 | es-from-aio-to-production | Deleting namespace: kuttl-test-exact-elk
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1216.42s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.96s)
        --- PASS: kuttl/harness/es-multiinstance (123.63s)
        --- FAIL: kuttl/harness/es-streaming-autoprovisioned (507.98s)
        --- PASS: kuttl/harness/es-simple-prod (6.08s)
        --- PASS: kuttl/harness/es-rollover-autoprov (248.35s)
        --- PASS: kuttl/harness/es-increasing-replicas (108.68s)
        --- PASS: kuttl/harness/es-index-cleaner-autoprov (135.92s)
        --- PASS: kuttl/harness/es-from-aio-to-production (79.77s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml
time="2023-10-02T13:01:47Z" level=debug msg="Setting a new name for the test suites"
time="2023-10-02T13:01:47Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-10-02T13:01:47Z" level=debug msg="normalizing test case names"
time="2023-10-02T13:01:47Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts"
time="2023-10-02T13:01:47Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance"
time="2023-10-02T13:01:47Z" level=debug msg="elasticsearch/es-streaming-autoprovisioned -> elasticsearch_es_streaming_autoprovisioned"
time="2023-10-02T13:01:47Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod"
time="2023-10-02T13:01:47Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov"
time="2023-10-02T13:01:47Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas"
time="2023-10-02T13:01:47Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov"
time="2023-10-02T13:01:47Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+--------------------------------------------+--------+
| NAME                                       | RESULT |
+--------------------------------------------+--------+
| elasticsearch_artifacts                    | passed |
| elasticsearch_es_multiinstance             | passed |
| elasticsearch_es_streaming_autoprovisioned | failed |
| elasticsearch_es_simple_prod               | passed |
| elasticsearch_es_rollover_autoprov         | passed |
| elasticsearch_es_increasing_replicas       | passed |
| elasticsearch_es_index_cleaner_autoprov    | passed |
| elasticsearch_es_from_aio_to_production    | passed |
+--------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ '[' 1 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=examples
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/examples.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-examples
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
KAFKA_VERSION=0.32.0 \
SKIP_KAFKA=false \
VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \
./tests/e2e/examples/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 27m Cluster version is 4.14.0-0.nightly-2023-09-29-231104'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 27m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-as-daemonset + cd examples-agent-as-daemonset + example_name=agent-as-daemonset + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-as-daemonset 01 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-as-daemonset.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-as-daemonset 02 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset + '[' examples-agent-as-daemonset '!=' _build ']' + cd .. + mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. 
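Every secured smoke test in this suite is rendered by the same sequence visible twice above: export the query and collector endpoints, render two gomplate templates, then unset the endpoints so they cannot leak into the next test. Reconstructed from the trace as a standalone sketch; the unsecured defaults (http://, :16686) and the non-OpenShift template path are assumptions, since this run only exercises the secured branch:

render_smoke_test() {
    local jaeger=$1 is_secured=$2 test_step=$3
    local protocol="http://" query_port=":16686"   # assumed defaults; not shown in this run
    local template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template   # assumed
    if [ "$is_secured" = true ]; then
        # On OpenShift the query service sits behind an OAuth proxy on 443
        protocol="https://" query_port=":443"
        template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
    fi
    export JAEGER_NAME="$jaeger"
    export JAEGER_QUERY_ENDPOINT="${protocol}${jaeger}-query${query_port}"
    export JAEGER_COLLECTOR_ENDPOINT="http://${jaeger}-collector-headless:14268"
    /tmp/jaeger-tests/bin/gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o "./${test_step}-assert.yaml"
    # Unset so the values cannot bleed into the next rendered test
    unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
}
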
+ mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + '[' false = true ']' + start_test examples-auto-provision-kafka + '[' 1 -ne 1 ']' + test_name=examples-auto-provision-kafka + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-auto-provision-kafka' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-auto-provision-kafka\e[0m' Rendering files for test examples-auto-provision-kafka + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. + mkdir -p examples-auto-provision-kafka + cd examples-auto-provision-kafka + example_name=auto-provision-kafka + render_install_kafka_operator 01 + '[' 1 -ne 1 ']' + test_step=01 + '[' true '!=' true ']' + render_install_example auto-provision-kafka 02 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/auto-provision-kafka.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + JAEGER_NAME=auto-provision-kafka + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=streaming ++ '[' streaming = production ']' ++ '[' streaming = streaming ']' ++ echo streaming ++ return 0 + jaeger_strategy=streaming + '[' streaming = DaemonSet ']' + '[' streaming = allInOne ']' + '[' streaming = production ']' + '[' streaming = streaming ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./02-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./02-install.yaml + mv ./02-assert.yaml ./05-assert.yaml + render_assert_kafka true auto-provision-kafka 02 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provision-kafka + test_step=02 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./02-assert.yaml ++ expr 02 + 1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./3-assert.yaml ++ expr 02 + 2 + CLUSTER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./04-assert.yaml + render_smoke_test_example auto-provision-kafka 06 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=06 + deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + jaeger_name=auto-provision-kafka + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test auto-provision-kafka true 06 + '[' 3 -ne 3 ']' + jaeger=auto-provision-kafka + is_secured=true + test_step=06 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + export JAEGER_NAME=auto-provision-kafka + JAEGER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./06-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-auto-provision-kafka + '[' examples-auto-provision-kafka '!=' _build ']' + cd .. 
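One detail worth noticing in the render_assert_kafka output above: the streaming Jaeger assert is first moved out of the way (02-assert.yaml becomes 05-assert.yaml), and the Kafka provisioning asserts then take steps 02-04 with indexes computed by expr. expr parses the zero-padded step as a plain decimal and prints it unpadded, which is why the Kafka cluster assert lands in ./3-assert.yaml rather than ./03-assert.yaml:

$ expr 02 + 1
3

kuttl keys step files on the leading integer, so 3-assert.yaml still resolves to step 3; only the file name is inconsistent (the entity-operator assert ends up zero-padded as 04-assert.yaml).
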
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
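The strategy detection that repeats before every assert template is small enough to reconstruct from the trace: it keeps .spec.strategy when that is production or streaming, otherwise falls back to .spec.agent.strategy (which is how the DaemonSet examples above are detected), and defaults to allInOne. A sketch, with yq standing in for /tmp/jaeger-tests/bin/yq:

get_jaeger_strategy() {
    local deployment_file=$1
    local strategy
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
    if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
        echo "$strategy"
        return 0
    fi
    # Fall back to the agent strategy; this is what flags the DaemonSet examples
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
    if [ "$strategy" != null ]; then
        echo "$strategy"      # e.g. DaemonSet
    else
        echo allInOne         # default when neither field is set
    fi
}
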
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
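After the two render steps above, the _build subdirectory for this test is a self-contained kuttl case. Assuming nothing beyond the four files rendered in the trace is written, the layout is:

examples-simple-prod/
├── 01-install.yaml     # simple-prod Jaeger CR; storage patched to a 1-node, 2Gi in-cluster Elasticsearch
├── 01-assert.yaml      # from production-jaeger-assert.yaml.template
├── 02-smoke-test.yaml  # from openshift/smoke-test.yaml.template
└── 02-assert.yaml      # from smoke-test-assert.yaml.template
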
+ mkdir -p examples-simple-prod-with-volumes + cd examples-simple-prod-with-volumes + example_name=simple-prod-with-volumes + render_install_example simple-prod-with-volumes 01 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod-with-volumes 02 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml + start_test examples-simplest + '[' 1 -ne 1 ']' + test_name=examples-simplest + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simplest' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simplest\e[0m' Rendering files for test examples-simplest + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes + '[' examples-simple-prod-with-volumes '!=' _build ']' + cd .. + mkdir -p examples-simplest + cd examples-simplest + example_name=simplest + render_install_example simplest 00 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 01 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger + '[' 1 -ne 1 ']' + test_name=examples-with-badger + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m' Rendering files for test examples-with-badger + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest + '[' examples-simplest '!=' _build ']' + cd .. + mkdir -p examples-with-badger + cd examples-with-badger + example_name=with-badger + render_install_example with-badger 00 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + JAEGER_NAME=with-badger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger 01 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + jaeger_name=with-badger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + export JAEGER_NAME=with-badger + JAEGER_NAME=with-badger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger-and-volume + '[' 1 -ne 1 ']' + test_name=examples-with-badger-and-volume + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger-and-volume' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m' Rendering files for test examples-with-badger-and-volume + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger + '[' examples-with-badger '!=' _build ']' + cd .. + mkdir -p examples-with-badger-and-volume + cd examples-with-badger-and-volume + example_name=with-badger-and-volume + render_install_example with-badger-and-volume 00 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + JAEGER_NAME=with-badger-and-volume + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger-and-volume 01 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + jaeger_name=with-badger-and-volume + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger-and-volume true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger-and-volume + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + export JAEGER_NAME=with-badger-and-volume + JAEGER_NAME=with-badger-and-volume + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-cassandra + '[' 1 -ne 1 ']' + test_name=examples-with-cassandra + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-cassandra' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m' Rendering files for test examples-with-cassandra + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume + '[' examples-with-badger-and-volume '!=' _build ']' + cd .. 
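The pair of sed edits that follows every gomplate render above normalizes the storage endpoints: the upstream example manifests point at elasticsearch.default.svc and cassandra.default.svc, but these tests deploy storage into the test's own namespace, so the namespace suffix has to go. The pattern, extracted verbatim from the trace:

install_file=./00-install.yaml
/tmp/jaeger-tests/bin/gomplate -f "/tmp/jaeger-tests/examples/${example_name}.yaml" -o "$install_file"
# Rewrite cross-namespace storage URLs to bare service names,
# resolvable inside whatever namespace kuttl creates for the test
sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' "$install_file"
sed -i 's~cassandra.default.svc~cassandra~gi' "$install_file"
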
+ mkdir -p examples-with-cassandra + cd examples-with-cassandra + example_name=with-cassandra + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-cassandra 01 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + JAEGER_NAME=with-cassandra + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-cassandra 02 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + jaeger_name=with-cassandra + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-cassandra true 02 + '[' 3 -ne 3 ']' + jaeger=with-cassandra + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + export JAEGER_NAME=with-cassandra + JAEGER_NAME=with-cassandra + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-sampling + '[' 1 -ne 1 ']' + test_name=examples-with-sampling + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-sampling' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m' Rendering files for test examples-with-sampling + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra + '[' examples-with-cassandra '!=' _build ']' + cd .. + mkdir -p examples-with-sampling + cd examples-with-sampling + export example_name=with-sampling + example_name=with-sampling + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-sampling 01 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + JAEGER_NAME=with-sampling + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-sampling 02 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + jaeger_name=with-sampling + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-sampling true 02 + '[' 3 -ne 3 ']' + jaeger=with-sampling + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + export JAEGER_NAME=with-sampling + JAEGER_NAME=with-sampling + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + start_test examples-openshift-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-openshift-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-agent-as-daemonset\e[0m' Rendering files for test examples-openshift-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling + '[' examples-with-sampling '!=' _build ']' + cd .. 
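Each of the repeated start_test blocks above does the same bookkeeping: print a banner, climb back out of the previous test's directory (the basename check guards the first test, which starts in _build itself), and create a fresh sibling directory. Reconstructed as a sketch; info is the script's own helper that echoes in blue, as seen throughout the trace:

start_test() {
    local test_name=$1
    echo ===========================================================================
    info "Rendering files for test $test_name"
    echo ===========================================================================
    # The first test starts in _build; every later test is one level deeper
    if [ "$(basename "$(pwd)")" != _build ]; then
        cd ..
    fi
    mkdir -p "$test_name"
    cd "$test_name"
}
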
+ mkdir -p examples-openshift-agent-as-daemonset + cd examples-openshift-agent-as-daemonset + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml + render_install_vertx 03 + '[' 1 -ne 1 ']' + test_step=03 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./03-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].env=[{"name": "JAEGER_AGENT_HOST", "valueFrom": {"fieldRef": {"apiVersion": "v1", "fieldPath": "status.hostIP"}}}]' ./03-install.yaml + render_find_service agent-as-daemonset production order 00 04 + '[' 5 -ne 5 ']' + jaeger=agent-as-daemonset + deployment_strategy=production + service_name=order + job_number=00 + test_step=04 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' production '!=' allInOne ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template -o ./04-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + start_test examples-openshift-with-htpasswd + '[' 1 -ne 1 ']' + test_name=examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-with-htpasswd' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m' Rendering files for test examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-agent-as-daemonset + '[' examples-openshift-agent-as-daemonset '!=' _build ']' + cd .. 
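Every render_* helper in the trace above (render_smoke_test, render_find_service, and the rest) follows the same contract: export a few variables, run gomplate over a template, write the result into the kuttl step directory, and unset the variables again. The template bodies never appear in this log, so the following is purely illustrative of the mechanism -- the template file and its contents are invented here; env.Getenv is gomplate's standard environment lookup:

# Hypothetical stand-in for find-service.yaml.template, whose real contents are not in this log.
cat > /tmp/illustrative.yaml.template <<'EOF'
apiVersion: batch/v1
kind: Job
metadata:
  name: {{ env.Getenv "JOB_NUMBER" }}-find-service
spec:
  template:
    spec:
      containers:
      - name: find-service
        env:
        - name: SERVICE_NAME
          value: {{ env.Getenv "SERVICE_NAME" }}
        - name: QUERY_HOST
          value: {{ env.Getenv "JAEGER_QUERY_ENDPOINT" }}
      restartPolicy: OnFailure
EOF
export JOB_NUMBER=00 SERVICE_NAME=order JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query
/tmp/jaeger-tests/bin/gomplate -f /tmp/illustrative.yaml.template -o /tmp/illustrative.yaml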
+ mkdir -p examples-openshift-with-htpasswd + cd examples-openshift-with-htpasswd + export JAEGER_NAME=with-htpasswd + JAEGER_NAME=with-htpasswd + export JAEGER_USERNAME=awesomeuser + JAEGER_USERNAME=awesomeuser + export JAEGER_PASSWORD=awesomepassword + JAEGER_PASSWORD=awesomepassword + export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' + JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ base64 + SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg== + /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + INSECURE=true + JAEGER_USERNAME= + JAEGER_PASSWORD= + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml + JAEGER_USERNAME=wronguser + JAEGER_PASSWORD=wrongpassword + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running examples E2E tests' Running examples E2E tests + cd tests/e2e/examples/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3595368357 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 17 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/examples-agent-as-daemonset === PAUSE kuttl/harness/examples-agent-as-daemonset === RUN kuttl/harness/examples-agent-with-priority-class === PAUSE kuttl/harness/examples-agent-with-priority-class === RUN kuttl/harness/examples-all-in-one-with-options === PAUSE kuttl/harness/examples-all-in-one-with-options === RUN kuttl/harness/examples-auto-provision-kafka === PAUSE kuttl/harness/examples-auto-provision-kafka === RUN kuttl/harness/examples-business-application-injected-sidecar === PAUSE kuttl/harness/examples-business-application-injected-sidecar === RUN kuttl/harness/examples-collector-with-priority-class === PAUSE kuttl/harness/examples-collector-with-priority-class === RUN kuttl/harness/examples-openshift-agent-as-daemonset === PAUSE kuttl/harness/examples-openshift-agent-as-daemonset === RUN kuttl/harness/examples-openshift-with-htpasswd === PAUSE kuttl/harness/examples-openshift-with-htpasswd === RUN kuttl/harness/examples-service-types === PAUSE kuttl/harness/examples-service-types === RUN kuttl/harness/examples-simple-prod === PAUSE kuttl/harness/examples-simple-prod === RUN kuttl/harness/examples-simple-prod-with-volumes === PAUSE kuttl/harness/examples-simple-prod-with-volumes === RUN kuttl/harness/examples-simplest === PAUSE kuttl/harness/examples-simplest === RUN kuttl/harness/examples-with-badger === PAUSE kuttl/harness/examples-with-badger === RUN kuttl/harness/examples-with-badger-and-volume === PAUSE kuttl/harness/examples-with-badger-and-volume === RUN kuttl/harness/examples-with-cassandra === PAUSE kuttl/harness/examples-with-cassandra === RUN kuttl/harness/examples-with-sampling === PAUSE kuttl/harness/examples-with-sampling === CONT kuttl/harness/artifacts logger.go:42: 13:02:25 | artifacts | Creating namespace: kuttl-test-fair-quagga logger.go:42: 13:02:25 | artifacts | artifacts events from ns kuttl-test-fair-quagga: logger.go:42: 13:02:25 | artifacts | Deleting namespace: kuttl-test-fair-quagga === CONT kuttl/harness/examples-service-types logger.go:42: 13:02:31 | examples-service-types | Creating namespace: kuttl-test-touched-crane logger.go:42: 13:02:31 | examples-service-types/0-install | starting test step 0-install logger.go:42: 13:02:31 | examples-service-types/0-install | Jaeger:kuttl-test-touched-crane/service-types created logger.go:42: 13:02:36 | examples-service-types/0-install | test step completed 0-install logger.go:42: 13:02:36 | examples-service-types/1-smoke-test | starting test step 1-smoke-test logger.go:42: 13:02:36 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null] logger.go:42: 13:02:38 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
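The kubectl warning above repeats for nearly every smoke test in this run and is benign: kuttl creates each Jaeger resource from the rendered install file without recording the kubectl.kubernetes.io/last-applied-configuration annotation, so the first kubectl apply issued against that resource (evidently by get-token.sh here) warns and back-fills it. Outside CI the warning can be avoided by creating the resource declaratively, e.g. (file name illustrative):

kubectl create --save-config -f 01-install.yaml   # records last-applied-configuration at creation time
# or manage the resource with apply from the start:
kubectl apply -f 01-install.yaml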
logger.go:42: 13:02:44 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:02:44 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:02:44 | examples-service-types/1-smoke-test | job.batch/report-span created logger.go:42: 13:02:45 | examples-service-types/1-smoke-test | job.batch/check-span created logger.go:42: 13:02:56 | examples-service-types/1-smoke-test | test step completed 1-smoke-test logger.go:42: 13:02:56 | examples-service-types/2- | starting test step 2- logger.go:42: 13:02:56 | examples-service-types/2- | test step completed 2- logger.go:42: 13:02:56 | examples-service-types | examples-service-types events from ns kuttl-test-touched-crane: logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td Scheduled Successfully assigned kuttl-test-touched-crane/service-types-59c8d96bfb-kd5td to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td AddedInterface Add eth0 [10.131.0.30/23] from ovn-kubernetes logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal ReplicaSet.apps service-types-59c8d96bfb SuccessfulCreate Created pod: service-types-59c8d96bfb-kd5td replicaset-controller logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:35 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet 
Scaled up replica set service-types-59c8d96bfb to 1 deployment-controller logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:38 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:38 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:39 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:39 +0000 UTC Normal Pod service-types-59c8d96bfb-kd5td.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:39 +0000 UTC Normal ReplicaSet.apps service-types-59c8d96bfb SuccessfulDelete Deleted pod: service-types-59c8d96bfb-kd5td replicaset-controller logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:39 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-59c8d96bfb to 0 from 1 deployment-controller logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:40 +0000 UTC Normal Pod service-types-5fcc698f57-7qdrf Scheduled Successfully assigned kuttl-test-touched-crane/service-types-5fcc698f57-7qdrf to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:40 +0000 UTC Normal Pod service-types-5fcc698f57-7qdrf AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:40 +0000 UTC Normal Pod service-types-5fcc698f57-7qdrf.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:40 +0000 UTC Normal ReplicaSet.apps service-types-5fcc698f57 SuccessfulCreate Created pod: service-types-5fcc698f57-7qdrf replicaset-controller logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:40 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-5fcc698f57 to 1 deployment-controller logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:41 +0000 UTC Normal Pod service-types-5fcc698f57-7qdrf.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:41 +0000 UTC Normal Pod service-types-5fcc698f57-7qdrf.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:41 +0000 UTC Normal Pod service-types-5fcc698f57-7qdrf.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:41 +0000 UTC Normal Pod service-types-5fcc698f57-7qdrf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:41 +0000 UTC Normal Pod service-types-5fcc698f57-7qdrf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:02:56 | examples-service-types | 
2023-10-02 13:02:41 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod check-span-mgb6z Scheduled Successfully assigned kuttl-test-touched-crane/check-span-mgb6z to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod check-span-mgb6z AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod check-span-mgb6z.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod check-span-mgb6z.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod check-span-mgb6z.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-mgb6z job-controller logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod report-span-kfzdb Scheduled Successfully assigned kuttl-test-touched-crane/report-span-kfzdb to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod report-span-kfzdb AddedInterface Add eth0 [10.129.2.62/23] from ovn-kubernetes logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod report-span-kfzdb.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod report-span-kfzdb.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Pod report-span-kfzdb.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:45 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-kfzdb job-controller logger.go:42: 13:02:56 | examples-service-types | 2023-10-02 13:02:55 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:02:56 | examples-service-types | Deleting namespace: kuttl-test-touched-crane === CONT kuttl/harness/examples-with-sampling logger.go:42: 13:03:23 | examples-with-sampling | Creating namespace: kuttl-test-robust-ringtail logger.go:42: 13:03:23 | examples-with-sampling/0-install | starting test step 0-install logger.go:42: 13:03:23 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 13:03:23 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 13:03:23 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-robust-ringtail logger.go:42: 13:03:23 | 
examples-with-sampling/0-install | kubectl create namespace kuttl-test-robust-ringtail 2>&1 | grep -v "already exists" || true logger.go:42: 13:03:23 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-robust-ringtail 2>&1 | grep -v "already exists" || true logger.go:42: 13:03:24 | examples-with-sampling/0-install | service/cassandra created logger.go:42: 13:03:24 | examples-with-sampling/0-install | statefulset.apps/cassandra created logger.go:42: 13:03:24 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 13:03:35 | examples-with-sampling/0-install | test step completed 0-install logger.go:42: 13:03:35 | examples-with-sampling/1-install | starting test step 1-install logger.go:42: 13:03:35 | examples-with-sampling/1-install | Jaeger:kuttl-test-robust-ringtail/with-sampling created logger.go:42: 13:03:41 | examples-with-sampling/1-install | test step completed 1-install logger.go:42: 13:03:41 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test logger.go:42: 13:03:41 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null] logger.go:42: 13:03:43 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 13:03:49 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:03:49 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:03:50 | examples-with-sampling/2-smoke-test | job.batch/report-span created logger.go:42: 13:03:50 | examples-with-sampling/2-smoke-test | job.batch/check-span created logger.go:42: 13:04:02 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test logger.go:42: 13:04:02 | examples-with-sampling/3- | starting test step 3- logger.go:42: 13:04:02 | examples-with-sampling/3- | test step completed 3- logger.go:42: 13:04:02 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-robust-ringtail: logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:24 +0000 UTC Normal Pod cassandra-0 Scheduled Successfully assigned kuttl-test-robust-ringtail/cassandra-0 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:24 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:25 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.63/23] from ovn-kubernetes logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:25 +0000 UTC Normal Pod 
cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:30 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 5.255240704s (5.255252534s including waiting) kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:30 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:30 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:30 +0000 UTC Normal Pod cassandra-1 Scheduled Successfully assigned kuttl-test-robust-ringtail/cassandra-1 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:30 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:30 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:31 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:34 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 3.85855176s (3.85856742s including waiting) kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:34 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:35 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:39 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr Scheduled Successfully assigned kuttl-test-robust-ringtail/with-sampling-564747745f-fjjhr to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:39 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:39 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:39 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:39 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:39 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:04:02 | examples-with-sampling 
| 2023-10-02 13:03:39 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:39 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:39 +0000 UTC Normal ReplicaSet.apps with-sampling-564747745f SuccessfulCreate Created pod: with-sampling-564747745f-fjjhr replicaset-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:39 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-564747745f to 1 deployment-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:44 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:44 +0000 UTC Normal Pod with-sampling-564747745f-fjjhr.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:44 +0000 UTC Normal ReplicaSet.apps with-sampling-564747745f SuccessfulDelete Deleted pod: with-sampling-564747745f-fjjhr replicaset-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:44 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set with-sampling-564747745f to 0 from 1 deployment-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:45 +0000 UTC Normal Pod with-sampling-7856cc5f78-vk8zz Scheduled Successfully assigned kuttl-test-robust-ringtail/with-sampling-7856cc5f78-vk8zz to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:45 +0000 UTC Normal ReplicaSet.apps with-sampling-7856cc5f78 SuccessfulCreate Created pod: with-sampling-7856cc5f78-vk8zz replicaset-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:45 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-7856cc5f78 to 1 deployment-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:46 +0000 UTC Normal Pod with-sampling-7856cc5f78-vk8zz AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:46 +0000 UTC Normal Pod with-sampling-7856cc5f78-vk8zz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:46 +0000 UTC Normal Pod with-sampling-7856cc5f78-vk8zz.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:46 +0000 UTC Normal Pod with-sampling-7856cc5f78-vk8zz.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:46 +0000 UTC Normal Pod with-sampling-7856cc5f78-vk8zz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:46 +0000 
UTC Normal Pod with-sampling-7856cc5f78-vk8zz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:46 +0000 UTC Normal Pod with-sampling-7856cc5f78-vk8zz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod check-span-zggnl Scheduled Successfully assigned kuttl-test-robust-ringtail/check-span-zggnl to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod check-span-zggnl AddedInterface Add eth0 [10.129.2.64/23] from ovn-kubernetes logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod check-span-zggnl.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod check-span-zggnl.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod check-span-zggnl.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-zggnl job-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod report-span-9m99l Scheduled Successfully assigned kuttl-test-robust-ringtail/report-span-9m99l to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod report-span-9m99l AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod report-span-9m99l.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod report-span-9m99l.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Pod report-span-9m99l.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:03:50 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9m99l job-controller logger.go:42: 13:04:02 | examples-with-sampling | 2023-10-02 13:04:01 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:04:02 | examples-with-sampling | Deleting namespace: kuttl-test-robust-ringtail === CONT kuttl/harness/examples-with-cassandra logger.go:42: 13:04:21 | examples-with-cassandra | Creating namespace: kuttl-test-hot-hedgehog logger.go:42: 13:04:21 | examples-with-cassandra/0-install | starting test step 0-install logger.go:42: 13:04:21 | examples-with-cassandra/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 13:04:21 | 
examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 13:04:21 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-hot-hedgehog logger.go:42: 13:04:21 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-hot-hedgehog 2>&1 | grep -v "already exists" || true logger.go:42: 13:04:21 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-hot-hedgehog 2>&1 | grep -v "already exists" || true logger.go:42: 13:04:22 | examples-with-cassandra/0-install | service/cassandra created logger.go:42: 13:04:22 | examples-with-cassandra/0-install | statefulset.apps/cassandra created logger.go:42: 13:04:22 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 13:04:24 | examples-with-cassandra/0-install | test step completed 0-install logger.go:42: 13:04:24 | examples-with-cassandra/1-install | starting test step 1-install logger.go:42: 13:04:24 | examples-with-cassandra/1-install | Jaeger:kuttl-test-hot-hedgehog/with-cassandra created logger.go:42: 13:04:57 | examples-with-cassandra/1-install | test step completed 1-install logger.go:42: 13:04:57 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test logger.go:42: 13:04:57 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null] logger.go:42: 13:04:58 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
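The smoke-test commands that follow are the same four-step pattern every example test in this suite uses; condensed here as a sketch (commands and endpoints copied from the log; the final wait is an assumption standing in for the rendered assert file, through which kuttl itself does the waiting):

SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh "$NAMESPACE" with-cassandra /dev/null
# The log also sets ASSERT_IMG to the CI pipeline image; elided here for brevity.
JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 \
JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 \
MOUNT_SECRET=e2e-test \
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
# report-span posts a test span to the collector; check-span polls the query API until it shows up.
kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
kubectl wait --for=condition=complete job/report-span job/check-span -n "$NAMESPACE" --timeout=420s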
logger.go:42: 13:05:04 | examples-with-cassandra/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:05:05 | examples-with-cassandra/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:05:05 | examples-with-cassandra/2-smoke-test | job.batch/report-span created logger.go:42: 13:05:05 | examples-with-cassandra/2-smoke-test | job.batch/check-span created logger.go:42: 13:05:17 | examples-with-cassandra/2-smoke-test | test step completed 2-smoke-test logger.go:42: 13:05:17 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-hot-hedgehog: logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:22 +0000 UTC Normal Pod cassandra-0 Scheduled Successfully assigned kuttl-test-hot-hedgehog/cassandra-0 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:22 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:23 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.65/23] from ovn-kubernetes logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:23 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:23 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:23 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:23 +0000 UTC Normal Pod cassandra-1 Scheduled Successfully assigned kuttl-test-hot-hedgehog/cassandra-1 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:23 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:24 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:24 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:24 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:24 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:28 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-v8sxr Scheduled Successfully assigned 
kuttl-test-hot-hedgehog/with-cassandra-cassandra-schema-job-v8sxr to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:28 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-v8sxr AddedInterface Add eth0 [10.128.2.36/23] from ovn-kubernetes logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:28 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-v8sxr.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.47.0" kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:28 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-v8sxr job-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:33 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-v8sxr.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.47.0" in 4.881391091s (4.881405732s including waiting) kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:33 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-v8sxr.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:33 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-v8sxr.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg Scheduled Successfully assigned kuttl-test-hot-hedgehog/with-cassandra-6cd979ff4d-zg2zg to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg AddedInterface Add eth0 [10.131.0.36/23] from ovn-kubernetes logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:05:17 | 
examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal ReplicaSet.apps with-cassandra-6cd979ff4d SuccessfulCreate Created pod: with-cassandra-6cd979ff4d-zg2zg replicaset-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:40 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-6cd979ff4d to 1 deployment-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:42 +0000 UTC Warning Pod with-cassandra-6cd979ff4d-zg2zg.spec.containers{jaeger} BackOff Back-off restarting failed container jaeger in pod with-cassandra-6cd979ff4d-zg2zg_kuttl-test-hot-hedgehog(437fa717-fa57-4d0f-aba8-2ed82986ec47) kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:59 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:59 +0000 UTC Normal Pod with-cassandra-6cd979ff4d-zg2zg.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:59 +0000 UTC Normal ReplicaSet.apps with-cassandra-6cd979ff4d SuccessfulDelete Deleted pod: with-cassandra-6cd979ff4d-zg2zg replicaset-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:04:59 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-6cd979ff4d to 0 from 1 deployment-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:00 +0000 UTC Normal Pod with-cassandra-d7dbbb8d-zkbqh Scheduled Successfully assigned kuttl-test-hot-hedgehog/with-cassandra-d7dbbb8d-zkbqh to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:00 +0000 UTC Normal ReplicaSet.apps with-cassandra-d7dbbb8d SuccessfulCreate Created pod: with-cassandra-d7dbbb8d-zkbqh replicaset-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:00 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-d7dbbb8d to 1 deployment-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:01 +0000 UTC Normal Pod with-cassandra-d7dbbb8d-zkbqh AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:01 +0000 UTC Normal Pod with-cassandra-d7dbbb8d-zkbqh.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:01 +0000 UTC Normal Pod with-cassandra-d7dbbb8d-zkbqh.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:01 +0000 UTC Normal Pod with-cassandra-d7dbbb8d-zkbqh.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:01 +0000 UTC Normal Pod with-cassandra-d7dbbb8d-zkbqh.spec.containers{oauth-proxy} Pulled Container image 
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:01 +0000 UTC Normal Pod with-cassandra-d7dbbb8d-zkbqh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:01 +0000 UTC Normal Pod with-cassandra-d7dbbb8d-zkbqh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod check-span-g2dvx Scheduled Successfully assigned kuttl-test-hot-hedgehog/check-span-g2dvx to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod check-span-g2dvx AddedInterface Add eth0 [10.129.2.66/23] from ovn-kubernetes logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod check-span-g2dvx.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod check-span-g2dvx.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod check-span-g2dvx.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-g2dvx job-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod report-span-78c7n Scheduled Successfully assigned kuttl-test-hot-hedgehog/report-span-78c7n to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod report-span-78c7n AddedInterface Add eth0 [10.128.2.37/23] from ovn-kubernetes logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod report-span-78c7n.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod report-span-78c7n.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Pod report-span-78c7n.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:05 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-78c7n job-controller logger.go:42: 13:05:17 | examples-with-cassandra | 2023-10-02 13:05:16 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:05:17 | examples-with-cassandra | Deleting namespace: kuttl-test-hot-hedgehog === CONT kuttl/harness/examples-with-badger-and-volume logger.go:42: 13:05:30 | examples-with-badger-and-volume | Creating namespace: kuttl-test-boss-magpie logger.go:42: 13:05:30 | 
examples-with-badger-and-volume/0-install | starting test step 0-install logger.go:42: 13:05:30 | examples-with-badger-and-volume/0-install | Jaeger:kuttl-test-boss-magpie/with-badger-and-volume created logger.go:42: 13:05:36 | examples-with-badger-and-volume/0-install | test step completed 0-install logger.go:42: 13:05:36 | examples-with-badger-and-volume/1-smoke-test | starting test step 1-smoke-test logger.go:42: 13:05:36 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger-and-volume /dev/null] logger.go:42: 13:05:37 | examples-with-badger-and-volume/1-smoke-test | Warning: resource jaegers/with-badger-and-volume is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 13:05:43 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:05:44 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:05:44 | examples-with-badger-and-volume/1-smoke-test | job.batch/report-span created logger.go:42: 13:05:44 | examples-with-badger-and-volume/1-smoke-test | job.batch/check-span created logger.go:42: 13:12:45 | examples-with-badger-and-volume/1-smoke-test | test step failed 1-smoke-test
case.go:364: failed in step 1-smoke-test
case.go:366: --- Job:kuttl-test-boss-magpie/check-span
+++ Job:kuttl-test-boss-magpie/check-span
@@ -1,8 +1,142 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
+  annotations:
+    batch.kubernetes.io/job-tracking: ""
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"check-span","namespace":"kuttl-test-boss-magpie"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./query"],"env":[{"name":"SERVICE_NAME","value":"smoke-test-service"},{"name":"QUERY_HOST","value":"https://with-badger-and-volume-query:443"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: 883810dd-d613-4736-ab2e-8f0e250a7aa2
+    batch.kubernetes.io/job-name: check-span
+    controller-uid: 883810dd-d613-4736-ab2e-8f0e250a7aa2
+    job-name: check-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:parallelism: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"QUERY_HOST"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2023-10-02T13:05:44Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-10-02T13:11:04Z"
   name: check-span
   namespace: kuttl-test-boss-magpie
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  parallelism: 1
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: 883810dd-d613-4736-ab2e-8f0e250a7aa2
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: 883810dd-d613-4736-ab2e-8f0e250a7aa2
+        batch.kubernetes.io/job-name: check-span
+        controller-uid: 883810dd-d613-4736-ab2e-8f0e250a7aa2
+        job-name: check-span
+    spec:
+      containers:
+      - command:
+        - ./query
+        env:
+        - name: SERVICE_NAME
+          value: smoke-test-service
+        - name: QUERY_HOST
+          value: https://with-badger-and-volume-query:443
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678
+        imagePullPolicy: IfNotPresent
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
 status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2023-10-02T13:05:44Z"
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-boss-magpie/check-span: .status.succeeded: key is missing from map
logger.go:42: 13:12:45 | examples-with-badger-and-volume | examples-with-badger-and-volume events from ns kuttl-test-boss-magpie: logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:33 +0000 UTC Normal Pod with-badger-and-volume-86c65d5d67-dnlh8 Scheduled Successfully assigned kuttl-test-boss-magpie/with-badger-and-volume-86c65d5d67-dnlh8 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:33 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-86c65d5d67 SuccessfulCreate Created pod: with-badger-and-volume-86c65d5d67-dnlh8 replicaset-controller logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:33 +0000 UTC Normal Deployment.apps with-badger-and-volume
ScalingReplicaSet Scaled up replica set with-badger-and-volume-86c65d5d67 to 1 deployment-controller logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:34 +0000 UTC Normal Pod with-badger-and-volume-86c65d5d67-dnlh8 AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:34 +0000 UTC Normal Pod with-badger-and-volume-86c65d5d67-dnlh8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:34 +0000 UTC Normal Pod with-badger-and-volume-86c65d5d67-dnlh8.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:34 +0000 UTC Normal Pod with-badger-and-volume-86c65d5d67-dnlh8.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:34 +0000 UTC Normal Pod with-badger-and-volume-86c65d5d67-dnlh8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:34 +0000 UTC Normal Pod with-badger-and-volume-86c65d5d67-dnlh8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:34 +0000 UTC Normal Pod with-badger-and-volume-86c65d5d67-dnlh8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:44 +0000 UTC Normal Pod report-span-w5nhl Scheduled Successfully assigned kuttl-test-boss-magpie/report-span-w5nhl to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:44 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-w5nhl job-controller logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Pod check-span-wmxlj Scheduled Successfully assigned kuttl-test-boss-magpie/check-span-wmxlj to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Pod check-span-wmxlj AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Pod check-span-wmxlj.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Pod check-span-wmxlj.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Pod check-span-wmxlj.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Job.batch check-span 
logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-wmxlj job-controller
logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Pod report-span-w5nhl AddedInterface Add eth0 [10.129.2.67/23] from ovn-kubernetes
logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Pod report-span-w5nhl.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Pod report-span-w5nhl.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:05:45 +0000 UTC Normal Pod report-span-w5nhl.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:12:45 | examples-with-badger-and-volume | 2023-10-02 13:10:50 +0000 UTC Warning Pod check-span-wmxlj.spec.containers{asserts-container} BackOff Back-off restarting failed container asserts-container in pod check-span-wmxlj_kuttl-test-boss-magpie(dfb7a3dc-bf26-4295-94d0-cbf39da8fe30) kubelet
logger.go:42: 13:12:45 | examples-with-badger-and-volume | Deleting namespace: kuttl-test-boss-magpie
=== CONT kuttl/harness/examples-with-badger
logger.go:42: 13:12:57 | examples-with-badger | Creating namespace: kuttl-test-noted-ferret
logger.go:42: 13:12:57 | examples-with-badger/0-install | starting test step 0-install
logger.go:42: 13:12:57 | examples-with-badger/0-install | Jaeger:kuttl-test-noted-ferret/with-badger created
logger.go:42: 13:13:05 | examples-with-badger/0-install | test step completed 0-install
logger.go:42: 13:13:05 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 13:13:05 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null]
logger.go:42: 13:13:06 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
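This kubectl warning recurs in every suite below and is benign here: the Jaeger resource was created by the operator/test harness rather than with kubectl apply, so the last-applied-configuration annotation is absent and kubectl patches it in on first apply. A hedged sketch of inspecting or seeding the annotation by hand (NAMESPACE and jaeger.yaml are placeholders, not from the run):

# View the configuration kubectl apply diffs against (empty until the first apply):
kubectl apply view-last-applied jaegers/with-badger -n "$NAMESPACE"
# Seed the annotation for a resource that was created imperatively;
# jaeger.yaml stands in for whatever manifest was actually used.
kubectl apply set-last-applied -f jaeger.yaml -n "$NAMESPACE" --create-annotation=true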
logger.go:42: 13:13:12 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:13:12 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:13:13 | examples-with-badger/1-smoke-test | job.batch/report-span created
logger.go:42: 13:13:13 | examples-with-badger/1-smoke-test | job.batch/check-span created
logger.go:42: 13:13:24 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 13:13:24 | examples-with-badger | examples-with-badger events from ns kuttl-test-noted-ferret:
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:01 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8 Scheduled Successfully assigned kuttl-test-noted-ferret/with-badger-6948c4695-ksfx8 to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:01 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8 AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:01 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:01 +0000 UTC Normal ReplicaSet.apps with-badger-6948c4695 SuccessfulCreate Created pod: with-badger-6948c4695-ksfx8 replicaset-controller
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:01 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-6948c4695 to 1 deployment-controller
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:02 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:02 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:02 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:02 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:02 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:08 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:08 +0000 UTC Normal Pod with-badger-6948c4695-ksfx8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:08 +0000 UTC Normal ReplicaSet.apps with-badger-6948c4695 SuccessfulDelete Deleted pod: with-badger-6948c4695-ksfx8 replicaset-controller
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:08 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-6948c4695 to 0 from 1 deployment-controller
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:09 +0000 UTC Normal Pod with-badger-5f64db48fc-9zfmp Scheduled Successfully assigned kuttl-test-noted-ferret/with-badger-5f64db48fc-9zfmp to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:09 +0000 UTC Normal Pod with-badger-5f64db48fc-9zfmp AddedInterface Add eth0 [10.129.2.68/23] from ovn-kubernetes
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:09 +0000 UTC Normal Pod with-badger-5f64db48fc-9zfmp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:09 +0000 UTC Normal Pod with-badger-5f64db48fc-9zfmp.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:09 +0000 UTC Normal Pod with-badger-5f64db48fc-9zfmp.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:09 +0000 UTC Normal Pod with-badger-5f64db48fc-9zfmp.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:09 +0000 UTC Normal ReplicaSet.apps with-badger-5f64db48fc SuccessfulCreate Created pod: with-badger-5f64db48fc-9zfmp replicaset-controller
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:09 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-5f64db48fc to 1 deployment-controller
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:10 +0000 UTC Normal Pod with-badger-5f64db48fc-9zfmp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:10 +0000 UTC Normal Pod with-badger-5f64db48fc-9zfmp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Pod check-span-xgv55 Scheduled Successfully assigned kuttl-test-noted-ferret/check-span-xgv55 to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Pod check-span-xgv55 AddedInterface Add eth0 [10.128.2.39/23] from ovn-kubernetes
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Pod check-span-xgv55.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Pod check-span-xgv55.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Pod check-span-xgv55.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-xgv55 job-controller
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Pod report-span-545d9 Scheduled Successfully assigned kuttl-test-noted-ferret/report-span-545d9 to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Pod report-span-545d9 AddedInterface Add eth0 [10.131.0.40/23] from ovn-kubernetes
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Pod report-span-545d9.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:13 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-545d9 job-controller
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:15 +0000 UTC Normal Pod report-span-545d9.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" in 1.439031742s (1.439053673s including waiting) kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:15 +0000 UTC Normal Pod report-span-545d9.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:15 +0000 UTC Normal Pod report-span-545d9.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:13:24 | examples-with-badger | 2023-10-02 13:13:24 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:13:24 | examples-with-badger | Deleting namespace: kuttl-test-noted-ferret
=== CONT kuttl/harness/examples-simplest
logger.go:42: 13:13:36 | examples-simplest | Creating namespace: kuttl-test-robust-mule
logger.go:42: 13:13:36 | examples-simplest/0-install | starting test step 0-install
logger.go:42: 13:13:36 | examples-simplest/0-install | Jaeger:kuttl-test-robust-mule/simplest created
logger.go:42: 13:13:42 | examples-simplest/0-install | test step completed 0-install
logger.go:42: 13:13:42 | examples-simplest/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 13:13:42 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 13:13:43 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
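Each smoke test first runs get-token.sh (see the running command line above) so the assert Jobs can authenticate through the oauth-proxy sidecar in front of the query service; the earlier Job spec shows the token mounted from the e2e-test secret at /var/run/secrets/api-token/token. A rough, hedged equivalent with plain kubectl, assuming kubectl >= 1.24 (for the TokenRequest-backed create token subcommand) and that the proxy accepts a bearer token; the query hostname resolves only from inside the cluster:

# Mint a short-lived token for the e2e-test ServiceAccount by hand:
TOKEN=$(kubectl create token e2e-test -n "$NAMESPACE")
# Exercise the Jaeger query API behind the proxy (run from a pod in-cluster):
curl -sk -H "Authorization: Bearer $TOKEN" https://simplest-query:443/api/services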
logger.go:42: 13:13:50 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:13:50 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:13:50 | examples-simplest/1-smoke-test | job.batch/report-span created
logger.go:42: 13:13:50 | examples-simplest/1-smoke-test | job.batch/check-span created
logger.go:42: 13:14:02 | examples-simplest/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 13:14:02 | examples-simplest | examples-simplest events from ns kuttl-test-robust-mule:
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:40 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5 Scheduled Successfully assigned kuttl-test-robust-mule/simplest-76df49dc7d-6z6l5 to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:40 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5 AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:40 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:40 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:40 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:40 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:40 +0000 UTC Normal ReplicaSet.apps simplest-76df49dc7d SuccessfulCreate Created pod: simplest-76df49dc7d-6z6l5 replicaset-controller
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:40 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-76df49dc7d to 1 deployment-controller
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:41 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:41 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:46 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:46 +0000 UTC Normal Pod simplest-76df49dc7d-6z6l5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:46 +0000 UTC Normal ReplicaSet.apps simplest-76df49dc7d SuccessfulDelete Deleted pod: simplest-76df49dc7d-6z6l5 replicaset-controller
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:46 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-76df49dc7d to 0 from 1 deployment-controller
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal Pod simplest-5ccff65cf4-cqndx Scheduled Successfully assigned kuttl-test-robust-mule/simplest-5ccff65cf4-cqndx to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal Pod simplest-5ccff65cf4-cqndx AddedInterface Add eth0 [10.129.2.69/23] from ovn-kubernetes
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal Pod simplest-5ccff65cf4-cqndx.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal Pod simplest-5ccff65cf4-cqndx.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal Pod simplest-5ccff65cf4-cqndx.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal Pod simplest-5ccff65cf4-cqndx.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal Pod simplest-5ccff65cf4-cqndx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal Pod simplest-5ccff65cf4-cqndx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal ReplicaSet.apps simplest-5ccff65cf4 SuccessfulCreate Created pod: simplest-5ccff65cf4-cqndx replicaset-controller
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:47 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-5ccff65cf4 to 1 deployment-controller
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:50 +0000 UTC Normal Pod check-span-bb8mp Scheduled Successfully assigned kuttl-test-robust-mule/check-span-bb8mp to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:50 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-bb8mp job-controller
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:50 +0000 UTC Normal Pod report-span-pmq5s Scheduled Successfully assigned kuttl-test-robust-mule/report-span-pmq5s to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:50 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-pmq5s job-controller
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:51 +0000 UTC Normal Pod check-span-bb8mp AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:51 +0000 UTC Normal Pod check-span-bb8mp.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:51 +0000 UTC Normal Pod check-span-bb8mp.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:51 +0000 UTC Normal Pod check-span-bb8mp.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:51 +0000 UTC Normal Pod report-span-pmq5s AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:51 +0000 UTC Normal Pod report-span-pmq5s.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:51 +0000 UTC Normal Pod report-span-pmq5s.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:13:51 +0000 UTC Normal Pod report-span-pmq5s.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:14:02 | examples-simplest | 2023-10-02 13:14:02 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:14:02 | examples-simplest | Deleting namespace: kuttl-test-robust-mule
=== CONT kuttl/harness/examples-simple-prod-with-volumes
logger.go:42: 13:14:09 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:14:09 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-easy-gnu
logger.go:42: 13:14:09 | examples-simple-prod-with-volumes/1-install | starting test step 1-install
logger.go:42: 13:14:09 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-easy-gnu/simple-prod created
logger.go:42: 13:14:46 | examples-simple-prod-with-volumes/1-install | test step completed 1-install
logger.go:42: 13:14:46 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 13:14:46 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 13:14:48 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
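The running command lines that follow show the pattern every suite repeats: gomplate renders smoke-test.yaml.template into the report-span/check-span Job pair, which is then applied into the test namespace. Reproducing the render outside CI would look roughly like this sketch; ASSERT_IMG and NAMESPACE are placeholders, the other values are taken from the log:

# ASSERT_IMG below is a placeholder; CI injects its own pipeline image.
ASSERT_IMG=quay.io/example/asserts:latest \
JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 \
JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 \
MOUNT_SECRET=e2e-test \
/tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"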
logger.go:42: 13:14:54 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:14:54 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:14:55 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created
logger.go:42: 13:14:55 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data]
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume
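The 3-check-volume one-liner above first resolves the collector pod name and then lists the mounted data directory inside it. The same check unrolled into two commands, using jsonpath in place of the yq pipeline from the log (a sketch, not part of the run):

# Resolve the collector pod created by the operator:
POD=$(kubectl get pods -n "$NAMESPACE" \
  -l app=jaeger -l app.kubernetes.io/component=collector \
  -o jsonpath='{.items[0].metadata.name}')
# List the extra volume's mount path inside the collector container:
kubectl exec "$POD" -n "$NAMESPACE" -- ls /usr/share/elasticsearch/data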
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-easy-gnu:
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79 Scheduled Successfully assigned kuttl-test-easy-gnu/elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79 to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:16 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79 replicaset-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:16 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesteasygnusimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f to 1 deployment-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79 AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:27 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesteasygnusimpleprod-1-bc49bd76f-rgt79.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:43 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-shtfg Scheduled Successfully assigned kuttl-test-easy-gnu/simple-prod-collector-6978c9cd74-shtfg to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:43 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-6978c9cd74 SuccessfulCreate Created pod: simple-prod-collector-6978c9cd74-shtfg replicaset-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:43 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-6978c9cd74 to 1 deployment-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:43 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr Scheduled Successfully assigned kuttl-test-easy-gnu/simple-prod-query-6bd559b468-wbwgr to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:43 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6bd559b468 SuccessfulCreate Created pod: simple-prod-query-6bd559b468-wbwgr replicaset-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:43 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6bd559b468 to 1 deployment-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-shtfg AddedInterface Add eth0 [10.129.2.70/23] from ovn-kubernetes
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-shtfg.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-shtfg.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-shtfg.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr AddedInterface Add eth0 [10.129.2.71/23] from ovn-kubernetes
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:44 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:50 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:50 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:50 +0000 UTC Normal Pod simple-prod-query-6bd559b468-wbwgr.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:50 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6bd559b468 SuccessfulDelete Deleted pod: simple-prod-query-6bd559b468-wbwgr replicaset-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:50 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-6bd559b468 to 0 from 1 deployment-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:51 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p Scheduled Successfully assigned kuttl-test-easy-gnu/simple-prod-query-64d79567b9-sqr7p to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:51 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:51 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:51 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:51 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:51 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:51 +0000 UTC Normal ReplicaSet.apps simple-prod-query-64d79567b9 SuccessfulCreate Created pod: simple-prod-query-64d79567b9-sqr7p replicaset-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:51 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-64d79567b9 to 1 deployment-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:52 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:52 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:52 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:52 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:52 +0000 UTC Normal Pod simple-prod-query-64d79567b9-sqr7p.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod check-span-74nct Scheduled Successfully assigned kuttl-test-easy-gnu/check-span-74nct to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod check-span-74nct AddedInterface Add eth0 [10.129.2.73/23] from ovn-kubernetes
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod check-span-74nct.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod check-span-74nct.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod check-span-74nct.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-74nct job-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod report-span-hhlrw Scheduled Successfully assigned kuttl-test-easy-gnu/report-span-hhlrw to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod report-span-hhlrw AddedInterface Add eth0 [10.129.2.72/23] from ovn-kubernetes
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod report-span-hhlrw.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod report-span-hhlrw.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Pod report-span-hhlrw.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:55 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-hhlrw job-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:14:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | 2023-10-02 13:15:06 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:15:06 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-easy-gnu
=== CONT kuttl/harness/examples-simple-prod
logger.go:42: 13:15:53 | examples-simple-prod | Creating namespace: kuttl-test-amazing-kite
logger.go:42: 13:15:53 | examples-simple-prod/1-install | starting test step 1-install
logger.go:42: 13:15:53 | examples-simple-prod/1-install | Jaeger:kuttl-test-amazing-kite/simple-prod created
logger.go:42: 13:16:28 | examples-simple-prod/1-install | test step completed 1-install
logger.go:42: 13:16:28 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 13:16:28 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 13:16:29 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
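The FailedGetResourceMetric and FailedComputeMetricsReplicas warnings in the event dump above come from the horizontal-pod-autoscaler polling before the resource metrics API has samples for the freshly started collector pod; the step still completed, so they read as startup noise rather than a failure here. A sketch of confirming that by hand (not part of the run; NAMESPACE is a placeholder, the HPA name is from the log):

# Show the HPA's current metrics and events:
kubectl describe hpa simple-prod-collector -n "$NAMESPACE"
# Once metrics-server has samples, this returns CPU/memory per pod:
kubectl top pod -n "$NAMESPACE"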
logger.go:42: 13:16:35 | examples-simple-prod/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:16:36 | examples-simple-prod/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:16:36 | examples-simple-prod/2-smoke-test | job.batch/report-span created
logger.go:42: 13:16:36 | examples-simple-prod/2-smoke-test | job.batch/check-span created
logger.go:42: 13:16:47 | examples-simple-prod/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 13:16:47 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-amazing-kite:
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:15:59 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fd89 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq replicaset-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:15:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq Scheduled Successfully assigned kuttl-test-amazing-kite/elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:15:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:15:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:15:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:15:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:15:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:15:59 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestamazingkitesimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fd89 to 1 deployment-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:09 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:14 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestamazingkitesimpleprod-1-dd487fdcssfq.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-f57l7 Scheduled Successfully assigned kuttl-test-amazing-kite/simple-prod-collector-5499b86c46-f57l7 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-f57l7 AddedInterface Add eth0 [10.129.2.75/23] from ovn-kubernetes
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-f57l7.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-f57l7.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-f57l7.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulCreate Created pod: simple-prod-collector-5499b86c46-f57l7 replicaset-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5499b86c46 to 1 deployment-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d Scheduled Successfully assigned kuttl-test-amazing-kite/simple-prod-query-7d6898b47b-vmc2d to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d AddedInterface Add eth0 [10.129.2.76/23] from ovn-kubernetes
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7d6898b47b SuccessfulCreate Created pod: simple-prod-query-7d6898b47b-vmc2d replicaset-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:26 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7d6898b47b to 1 deployment-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:27 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:27 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:32 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:32 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:32 +0000 UTC Normal Pod simple-prod-query-7d6898b47b-vmc2d.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:32 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7d6898b47b SuccessfulDelete Deleted pod: simple-prod-query-7d6898b47b-vmc2d replicaset-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:32 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-7d6898b47b to 0 from 1 deployment-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:33 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m Scheduled Successfully assigned kuttl-test-amazing-kite/simple-prod-query-756859d4b7-pfv2m to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:33 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:33 +0000 UTC Normal ReplicaSet.apps simple-prod-query-756859d4b7 SuccessfulCreate Created pod: simple-prod-query-756859d4b7-pfv2m replicaset-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:33 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-756859d4b7 to 1 deployment-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:34 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:34 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:34 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:34 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:34 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:34 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:34 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:34 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:34 +0000 UTC Normal Pod simple-prod-query-756859d4b7-pfv2m.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:36 +0000 UTC Normal Pod check-span-thdbh Scheduled Successfully assigned kuttl-test-amazing-kite/check-span-thdbh to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:36 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-thdbh job-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:36 +0000 UTC Normal Pod report-span-vvv96 Scheduled Successfully assigned kuttl-test-amazing-kite/report-span-vvv96 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:36 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-vvv96 job-controller
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:37 +0000 UTC Normal Pod check-span-thdbh AddedInterface Add eth0 [10.129.2.78/23] from ovn-kubernetes
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:37 +0000 UTC Normal Pod check-span-thdbh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:37 +0000 UTC Normal Pod check-span-thdbh.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:37 +0000 UTC Normal Pod check-span-thdbh.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:37 +0000 UTC Normal Pod report-span-vvv96 AddedInterface Add eth0 [10.129.2.77/23] from ovn-kubernetes
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:37 +0000 UTC Normal Pod report-span-vvv96.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:37 +0000 UTC Normal Pod report-span-vvv96.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:37 +0000 UTC Normal Pod report-span-vvv96.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:16:47 | examples-simple-prod | 2023-10-02 13:16:47 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:16:47 | examples-simple-prod | Deleting namespace: kuttl-test-amazing-kite
=== CONT kuttl/harness/examples-business-application-injected-sidecar
logger.go:42: 13:17:00 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-intense-beetle
logger.go:42: 13:17:00 | examples-business-application-injected-sidecar/0-install | starting test step 0-install
logger.go:42: 13:17:01 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-intense-beetle/myapp created
logger.go:42: 13:17:01 | examples-business-application-injected-sidecar/0-install | test step completed 0-install
logger.go:42: 13:17:01 | examples-business-application-injected-sidecar/1-install | starting test step 1-install
=== CONT kuttl/harness/examples-business-application-injected-sidecar
logger.go:42: 13:17:00 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-intense-beetle
logger.go:42: 13:17:00 | examples-business-application-injected-sidecar/0-install | starting test step 0-install
logger.go:42: 13:17:01 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-intense-beetle/myapp created
logger.go:42: 13:17:01 | examples-business-application-injected-sidecar/0-install | test step completed 0-install
logger.go:42: 13:17:01 | examples-business-application-injected-sidecar/1-install | starting test step 1-install
logger.go:42: 13:17:01 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-intense-beetle/simplest created
logger.go:42: 13:17:12 | examples-business-application-injected-sidecar/1-install | test step completed 1-install
logger.go:42: 13:17:12 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 13:17:12 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 13:17:13 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:17:20 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:17:20 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:17:20 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created
logger.go:42: 13:17:20 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-intense-beetle:
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:01 +0000 UTC Normal Pod myapp-7c764668bd-l7ksk Scheduled Successfully assigned kuttl-test-intense-beetle/myapp-7c764668bd-l7ksk to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:01 +0000 UTC Normal Pod myapp-7c764668bd-l7ksk AddedInterface Add eth0 [10.129.2.79/23] from ovn-kubernetes
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:01 +0000 UTC Normal Pod myapp-7c764668bd-l7ksk.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:01 +0000 UTC Normal ReplicaSet.apps myapp-7c764668bd SuccessfulCreate Created pod: myapp-7c764668bd-l7ksk replicaset-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:01 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-7c764668bd to 1 deployment-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:04 +0000 UTC Normal Pod myapp-66df669fff-xtj5j Scheduled Successfully assigned kuttl-test-intense-beetle/myapp-66df669fff-xtj5j to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:04 +0000 UTC Normal ReplicaSet.apps myapp-66df669fff SuccessfulCreate Created pod: myapp-66df669fff-xtj5j replicaset-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:04 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-66df669fff to 1 deployment-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:05 +0000 UTC Warning Pod myapp-66df669fff-xtj5j FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:05 +0000 UTC Warning Pod myapp-66df669fff-xtj5j FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:06 +0000 UTC Normal Pod myapp-7c764668bd-l7ksk.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 4.054568318s (4.054584529s including waiting) kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:06 +0000 UTC Normal Pod myapp-7c764668bd-l7ksk.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:06 +0000 UTC Normal Pod myapp-7c764668bd-l7ksk.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:09 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5 Scheduled Successfully assigned kuttl-test-intense-beetle/simplest-748b8845d5-fp5v5 to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:09 +0000 UTC Normal ReplicaSet.apps simplest-748b8845d5 SuccessfulCreate Created pod: simplest-748b8845d5-fp5v5 replicaset-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:09 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-748b8845d5 to 1 deployment-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:10 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5 AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:10 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:10 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:10 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:10 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:10 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:10 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:12 +0000 UTC Warning Pod myapp-7c764668bd-l7ksk.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.129.2.79:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:13 +0000 UTC Normal Pod myapp-66df669fff-xtj5j AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:13 +0000 UTC Normal Pod myapp-66df669fff-xtj5j.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:15 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:15 +0000 UTC Normal Pod simplest-748b8845d5-fp5v5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:15 +0000 UTC Normal ReplicaSet.apps simplest-748b8845d5 SuccessfulDelete Deleted pod: simplest-748b8845d5-fp5v5 replicaset-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:15 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-748b8845d5 to 0 from 1 deployment-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod myapp-66df669fff-xtj5j.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.631870691s (3.631880322s including waiting) kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod myapp-66df669fff-xtj5j.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod myapp-66df669fff-xtj5j.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod myapp-66df669fff-xtj5j.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod myapp-66df669fff-xtj5j.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod myapp-66df669fff-xtj5j.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod simplest-b994c6ff4-vjp84 Scheduled Successfully assigned kuttl-test-intense-beetle/simplest-b994c6ff4-vjp84 to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod simplest-b994c6ff4-vjp84 AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod simplest-b994c6ff4-vjp84.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod simplest-b994c6ff4-vjp84.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod simplest-b994c6ff4-vjp84.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod simplest-b994c6ff4-vjp84.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod simplest-b994c6ff4-vjp84.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Pod simplest-b994c6ff4-vjp84.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal ReplicaSet.apps simplest-b994c6ff4 SuccessfulCreate Created pod: simplest-b994c6ff4-vjp84 replicaset-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:16 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-b994c6ff4 to 1 deployment-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:17 +0000 UTC Normal Pod myapp-7c764668bd-l7ksk.spec.containers{myapp} Killing Stopping container myapp kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:17 +0000 UTC Normal ReplicaSet.apps myapp-7c764668bd SuccessfulDelete Deleted pod: myapp-7c764668bd-l7ksk replicaset-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:17 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-7c764668bd to 0 from 1 deployment-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:20 +0000 UTC Normal Pod check-span-p4xsd Scheduled Successfully assigned kuttl-test-intense-beetle/check-span-p4xsd to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:20 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-p4xsd job-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:20 +0000 UTC Normal Pod report-span-b8c57 Scheduled Successfully assigned kuttl-test-intense-beetle/report-span-b8c57 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:20 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-b8c57 job-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:21 +0000 UTC Normal Pod check-span-p4xsd AddedInterface Add eth0 [10.129.2.81/23] from ovn-kubernetes
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:21 +0000 UTC Normal Pod check-span-p4xsd.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:21 +0000 UTC Normal Pod check-span-p4xsd.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:21 +0000 UTC Normal Pod check-span-p4xsd.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:21 +0000 UTC Normal Pod report-span-b8c57 AddedInterface Add eth0 [10.129.2.80/23] from ovn-kubernetes
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:21 +0000 UTC Normal Pod report-span-b8c57.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:21 +0000 UTC Normal Pod report-span-b8c57.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:21 +0000 UTC Normal Pod report-span-b8c57.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:24 +0000 UTC Warning Pod myapp-66df669fff-xtj5j.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.128.2.43:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | 2023-10-02 13:17:31 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:17:31 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-intense-beetle
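The 2-smoke-test step above is the recurring pattern in this suite: gomplate renders a Job manifest from a shared template with per-instance endpoints, then kubectl applies it. Reduced to its core (a sketch only; the endpoint values are copied from this test's log and differ per Jaeger instance, and the final wait is illustrative since kuttl does its own polling):

    NAMESPACE=kuttl-test-intense-beetle   # hypothetical; kuttl injects this per test
    JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://simplest-query:443 \
    MOUNT_SECRET=e2e-test \
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"   # creates the report-span and check-span Jobs
    kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=120s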
=== CONT kuttl/harness/examples-openshift-with-htpasswd
logger.go:42: 13:17:45 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:17:45 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:17:45 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-relaxing-sparrow
logger.go:42: 13:17:45 | examples-openshift-with-htpasswd/0-install | starting test step 0-install
logger.go:42: 13:17:45 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-relaxing-sparrow/htpasswd created
logger.go:42: 13:17:45 | examples-openshift-with-htpasswd/0-install | test step completed 0-install
logger.go:42: 13:17:45 | examples-openshift-with-htpasswd/1-install | starting test step 1-install
logger.go:42: 13:17:45 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-relaxing-sparrow/with-htpasswd created
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/1-install | test step completed 1-install
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh]
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 13:17:51 | examples-openshift-with-htpasswd/2-check-unsecured | HTTP response is 503. 403 expected. Waiting 10 s
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 3/30 the https://with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd]
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-relaxing-sparrow.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-relaxing-sparrow:
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:48 +0000 UTC Normal Pod with-htpasswd-5c4cc56c6c-m78jq Scheduled Successfully assigned kuttl-test-relaxing-sparrow/with-htpasswd-5c4cc56c6c-m78jq to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:48 +0000 UTC Warning Pod with-htpasswd-5c4cc56c6c-m78jq FailedMount MountVolume.SetUp failed for volume "with-htpasswd-collector-tls-config-volume" : secret "with-htpasswd-collector-headless-tls" not found kubelet
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:48 +0000 UTC Normal ReplicaSet.apps with-htpasswd-5c4cc56c6c SuccessfulCreate Created pod: with-htpasswd-5c4cc56c6c-m78jq replicaset-controller
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:48 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-5c4cc56c6c to 1 deployment-controller
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:49 +0000 UTC Normal Pod with-htpasswd-5c4cc56c6c-m78jq AddedInterface Add eth0 [10.129.2.82/23] from ovn-kubernetes
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:49 +0000 UTC Normal Pod with-htpasswd-5c4cc56c6c-m78jq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:49 +0000 UTC Normal Pod with-htpasswd-5c4cc56c6c-m78jq.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:49 +0000 UTC Normal Pod with-htpasswd-5c4cc56c6c-m78jq.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:49 +0000 UTC Normal Pod with-htpasswd-5c4cc56c6c-m78jq.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:49 +0000 UTC Normal Pod with-htpasswd-5c4cc56c6c-m78jq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | 2023-10-02 13:17:49 +0000 UTC Normal Pod with-htpasswd-5c4cc56c6c-m78jq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:18:01 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-relaxing-sparrow
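Steps 2-check-unsecured through 4-check-authorized above are all the same check: poll the route until curl returns the expected status code. A minimal sketch of what assert-jaeger-http-code.sh appears to be doing, judging from its output (the credentials and expected codes are copied from this test; the real script's retry and backoff details are not shown in the log):

    HOST=$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE")
    # unauthenticated: the oauth-proxy should reject with 403
    code=$(curl -k -s -o /dev/null -w '%{http_code}' "https://$HOST/search")
    [ "$code" = 403 ] || echo "expected 403, got $code"
    # wrong htpasswd credentials: still 403
    code=$(curl -k -s -o /dev/null -w '%{http_code}' -u wronguser:wrongpassword "https://$HOST/search")
    [ "$code" = 403 ] || echo "expected 403, got $code"
    # valid credentials: the query UI answers 200
    code=$(curl -k -s -o /dev/null -w '%{http_code}' -u awesomeuser:awesomepassword "https://$HOST/search")
    [ "$code" = 200 ] || echo "expected 200, got $code"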
=== CONT kuttl/harness/examples-openshift-agent-as-daemonset
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset | Creating namespace: kuttl-test-helping-hound
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset/0-install | starting test step 0-install
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-helping-hound/jaeger-agent-daemonset created
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset/0-install | test step completed 0-install
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset/1-add-policy | starting test step 1-add-policy
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset]
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset"
logger.go:42: 13:18:08 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c sleep 5]
logger.go:42: 13:18:13 | examples-openshift-agent-as-daemonset/1-add-policy | test step completed 1-add-policy
logger.go:42: 13:18:13 | examples-openshift-agent-as-daemonset/2-install | starting test step 2-install
logger.go:42: 13:18:13 | examples-openshift-agent-as-daemonset/2-install | Jaeger:kuttl-test-helping-hound/agent-as-daemonset created
logger.go:42: 13:18:20 | examples-openshift-agent-as-daemonset/2-install | test step completed 2-install
logger.go:42: 13:18:20 | examples-openshift-agent-as-daemonset/3-install | starting test step 3-install
logger.go:42: 13:18:20 | examples-openshift-agent-as-daemonset/3-install | Deployment:kuttl-test-helping-hound/vertx-create-span-sidecar created
logger.go:42: 13:18:26 | examples-openshift-agent-as-daemonset/3-install | test step completed 3-install
logger.go:42: 13:18:26 | examples-openshift-agent-as-daemonset/4-find-service | starting test step 4-find-service
logger.go:42: 13:18:26 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 13:18:27 | examples-openshift-agent-as-daemonset/4-find-service | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:18:33 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_NAME=order ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JOB_NUMBER=00 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o find-service-00-job.yaml]
logger.go:42: 13:18:34 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c kubectl create -f find-service-00-job.yaml -n $NAMESPACE]
logger.go:42: 13:18:34 | examples-openshift-agent-as-daemonset/4-find-service | job.batch/00-find-service created
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset/4-find-service | test step completed 4-find-service
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | examples-openshift-agent-as-daemonset events from ns kuttl-test-helping-hound:
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:16 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp Scheduled Successfully assigned kuttl-test-helping-hound/agent-as-daemonset-96d9d5766-2j8sp to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:16 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-96d9d5766 SuccessfulCreate Created pod: agent-as-daemonset-96d9d5766-2j8sp replicaset-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:16 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-96d9d5766 to 1 deployment-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:17 +0000 UTC Warning Pod agent-as-daemonset-96d9d5766-2j8sp FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-collector-tls-config-volume" : secret "agent-as-daemonset-collector-headless-tls" not found kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:17 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp AddedInterface Add eth0 [10.129.2.83/23] from ovn-kubernetes
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:17 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:17 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:17 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:17 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:17 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:17 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:20 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz7c6 Scheduled Successfully assigned kuttl-test-helping-hound/agent-as-daemonset-agent-daemonset-jz7c6 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:20 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-qb5xr Scheduled Successfully assigned kuttl-test-helping-hound/agent-as-daemonset-agent-daemonset-qb5xr to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:20 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z6jtd Scheduled Successfully assigned kuttl-test-helping-hound/agent-as-daemonset-agent-daemonset-z6jtd to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:20 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-z6jtd daemonset-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:20 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-jz7c6 daemonset-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:20 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-qb5xr daemonset-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:20 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-2nkn8 Scheduled Successfully assigned kuttl-test-helping-hound/vertx-create-span-sidecar-54946f4fd-2nkn8 to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:20 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-54946f4fd SuccessfulCreate Created pod: vertx-create-span-sidecar-54946f4fd-2nkn8 replicaset-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:20 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-54946f4fd to 1 deployment-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz7c6 AddedInterface Add eth0 [10.129.2.84/23] from ovn-kubernetes
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz7c6.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz7c6.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-jz7c6.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-qb5xr AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-qb5xr.spec.containers{jaeger-agent-daemonset} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z6jtd AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z6jtd.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z6jtd.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z6jtd.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-2nkn8 AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:21 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:24 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-qb5xr.spec.containers{jaeger-agent-daemonset} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" in 3.389687542s (3.389698262s including waiting) kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:24 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-qb5xr.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:24 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-qb5xr.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:24 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.748813128s (3.748827689s including waiting) kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:24 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:24 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:28 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:28 +0000 UTC Normal Pod agent-as-daemonset-96d9d5766-2j8sp.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:28 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-96d9d5766 SuccessfulDelete Deleted pod: agent-as-daemonset-96d9d5766-2j8sp replicaset-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:28 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-96d9d5766 to 0 from 1 deployment-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:29 +0000 UTC Normal Pod agent-as-daemonset-8dbb7fc77-9kd64 Scheduled Successfully assigned kuttl-test-helping-hound/agent-as-daemonset-8dbb7fc77-9kd64 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:29 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-8dbb7fc77 SuccessfulCreate Created pod: agent-as-daemonset-8dbb7fc77-9kd64 replicaset-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:29 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-8dbb7fc77 to 1 deployment-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:30 +0000 UTC Normal Pod agent-as-daemonset-8dbb7fc77-9kd64 AddedInterface Add eth0 [10.129.2.85/23] from ovn-kubernetes
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:30 +0000 UTC Normal Pod agent-as-daemonset-8dbb7fc77-9kd64.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:30 +0000 UTC Normal Pod agent-as-daemonset-8dbb7fc77-9kd64.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:30 +0000 UTC Normal Pod agent-as-daemonset-8dbb7fc77-9kd64.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:30 +0000 UTC Normal Pod agent-as-daemonset-8dbb7fc77-9kd64.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:30 +0000 UTC Normal Pod agent-as-daemonset-8dbb7fc77-9kd64.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:30 +0000 UTC Normal Pod agent-as-daemonset-8dbb7fc77-9kd64.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:33 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.47:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:33 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.47:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:34 +0000 UTC Normal Pod 00-find-service-cr68x Scheduled Successfully assigned kuttl-test-helping-hound/00-find-service-cr68x to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:34 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-cr68x job-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:35 +0000 UTC Normal Pod 00-find-service-cr68x AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:35 +0000 UTC Normal Pod 00-find-service-cr68x.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:35 +0000 UTC Normal Pod 00-find-service-cr68x.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:35 +0000 UTC Normal Pod 00-find-service-cr68x.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:35 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:35 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.47:8080/": read tcp 10.131.0.2:60518->10.131.0.47:8080: read: connection reset by peer kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:35 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.47:8080/": dial tcp 10.131.0.47:8080: connect: connection refused kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:35 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:18:46 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-2nkn8.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.47:8080/": read tcp 10.131.0.2:43386->10.131.0.47:8080: read: connection reset by peer kubelet
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | 2023-10-02 13:19:02 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller
logger.go:42: 13:19:02 | examples-openshift-agent-as-daemonset | Deleting namespace: kuttl-test-helping-hound
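Steps 0-install and 1-add-policy exist because the agent DaemonSet binds host ports on every node, which the default restricted SCC forbids; the test therefore creates a dedicated SecurityContextConstraints object and grants it to the DaemonSet's service account. The grant as run above, plus a hedged verification step (the clusterrole name is taken from the oc output in this log; the sleep 5 that follows in the test is simply giving the RBAC change time to propagate):

    oc adm policy --namespace "$NAMESPACE" add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset
    # confirm the binding exists before the operator rolls out the DaemonSet
    oc get clusterrole system:openshift:scc:daemonset-with-hostport -o name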
=== CONT kuttl/harness/examples-collector-with-priority-class
logger.go:42: 13:19:09 | examples-collector-with-priority-class | Creating namespace: kuttl-test-champion-sheepdog
logger.go:42: 13:19:09 | examples-collector-with-priority-class/0-install | starting test step 0-install
logger.go:42: 13:19:09 | examples-collector-with-priority-class/0-install | PriorityClass:/collector-high-priority created
logger.go:42: 13:19:09 | examples-collector-with-priority-class/0-install | Jaeger:kuttl-test-champion-sheepdog/collector-with-high-priority created
logger.go:42: 13:19:15 | examples-collector-with-priority-class/0-install | test step completed 0-install
logger.go:42: 13:19:15 | examples-collector-with-priority-class/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 13:19:15 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE collector-with-high-priority /dev/null]
logger.go:42: 13:19:17 | examples-collector-with-priority-class/1-smoke-test | Warning: resource jaegers/collector-with-high-priority is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:19:23 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:19:23 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:19:24 | examples-collector-with-priority-class/1-smoke-test | job.batch/report-span created
logger.go:42: 13:19:24 | examples-collector-with-priority-class/1-smoke-test | job.batch/check-span created
logger.go:42: 13:19:35 | examples-collector-with-priority-class/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 13:19:35 | examples-collector-with-priority-class | examples-collector-with-priority-class events from ns kuttl-test-champion-sheepdog:
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln Scheduled Successfully assigned kuttl-test-champion-sheepdog/collector-with-high-priority-5695dbc54c-4cgln to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln AddedInterface Add eth0 [10.131.0.49/23] from ovn-kubernetes
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-5695dbc54c SuccessfulCreate Created pod: collector-with-high-priority-5695dbc54c-4cgln replicaset-controller
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:13 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-5695dbc54c to 1 deployment-controller
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:20 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:20 +0000 UTC Normal Pod collector-with-high-priority-5695dbc54c-4cgln.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:20 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-5695dbc54c SuccessfulDelete Deleted pod: collector-with-high-priority-5695dbc54c-4cgln replicaset-controller
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:20 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled down replica set collector-with-high-priority-5695dbc54c to 0 from 1 deployment-controller
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:21 +0000 UTC Normal Pod collector-with-high-priority-69fbc64c97-gvk4z Scheduled Successfully assigned kuttl-test-champion-sheepdog/collector-with-high-priority-69fbc64c97-gvk4z to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:21 +0000 UTC Normal Pod collector-with-high-priority-69fbc64c97-gvk4z AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:21 +0000 UTC Normal Pod collector-with-high-priority-69fbc64c97-gvk4z.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:21 +0000 UTC Normal Pod collector-with-high-priority-69fbc64c97-gvk4z.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:21 +0000 UTC Normal Pod collector-with-high-priority-69fbc64c97-gvk4z.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:21 +0000 UTC Normal Pod collector-with-high-priority-69fbc64c97-gvk4z.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:21 +0000 UTC Normal Pod collector-with-high-priority-69fbc64c97-gvk4z.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:21 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-69fbc64c97 SuccessfulCreate Created pod: collector-with-high-priority-69fbc64c97-gvk4z replicaset-controller
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:21 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-69fbc64c97 to 1 deployment-controller
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:22 +0000 UTC Normal Pod collector-with-high-priority-69fbc64c97-gvk4z.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod check-span-klw8n Scheduled Successfully assigned kuttl-test-champion-sheepdog/check-span-klw8n to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod check-span-klw8n AddedInterface Add eth0 [10.128.2.46/23] from ovn-kubernetes
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod check-span-klw8n.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod check-span-klw8n.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod check-span-klw8n.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-klw8n job-controller
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod report-span-sh469 Scheduled Successfully assigned kuttl-test-champion-sheepdog/report-span-sh469 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod report-span-sh469 AddedInterface Add eth0 [10.129.2.86/23] from ovn-kubernetes
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod report-span-sh469.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod report-span-sh469.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Pod report-span-sh469.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:24 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-sh469 job-controller
logger.go:42: 13:19:35 | examples-collector-with-priority-class | 2023-10-02 13:19:35 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:19:35 | examples-collector-with-priority-class | Deleting namespace: kuttl-test-champion-sheepdog
kuttl-test-welcome-llama logger.go:42: 13:19:47 | examples-all-in-one-with-options/0-install | starting test step 0-install logger.go:42: 13:19:47 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-welcome-llama/my-jaeger created logger.go:42: 13:19:53 | examples-all-in-one-with-options/0-install | test step completed 0-install logger.go:42: 13:19:53 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test logger.go:42: 13:19:53 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:19:54 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 13:20:01 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:20:01 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:20:01 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created logger.go:42: 13:20:01 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created logger.go:42: 13:20:13 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test logger.go:42: 13:20:13 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-welcome-llama: logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r Scheduled Successfully assigned kuttl-test-welcome-llama/my-jaeger-589489f645-gr96r to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r AddedInterface Add eth0 [10.129.2.87/23] from ovn-kubernetes logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r.spec.containers{oauth-proxy} Pulled Container image 
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal ReplicaSet.apps my-jaeger-589489f645 SuccessfulCreate Created pod: my-jaeger-589489f645-gr96r replicaset-controller logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:51 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-589489f645 to 1 deployment-controller logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:56 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:56 +0000 UTC Normal Pod my-jaeger-589489f645-gr96r.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:56 +0000 UTC Normal ReplicaSet.apps my-jaeger-589489f645 SuccessfulDelete Deleted pod: my-jaeger-589489f645-gr96r replicaset-controller logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:56 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-589489f645 to 0 from 1 deployment-controller logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal Pod my-jaeger-7745dccf5d-psjhg Scheduled Successfully assigned kuttl-test-welcome-llama/my-jaeger-7745dccf5d-psjhg to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal Pod my-jaeger-7745dccf5d-psjhg AddedInterface Add eth0 [10.131.0.51/23] from ovn-kubernetes logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal Pod my-jaeger-7745dccf5d-psjhg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal Pod my-jaeger-7745dccf5d-psjhg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal Pod my-jaeger-7745dccf5d-psjhg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal Pod my-jaeger-7745dccf5d-psjhg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal Pod my-jaeger-7745dccf5d-psjhg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:20:13 | 
examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal Pod my-jaeger-7745dccf5d-psjhg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal ReplicaSet.apps my-jaeger-7745dccf5d SuccessfulCreate Created pod: my-jaeger-7745dccf5d-psjhg replicaset-controller logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:19:57 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7745dccf5d to 1 deployment-controller logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:01 +0000 UTC Normal Pod check-span-gvhqx Scheduled Successfully assigned kuttl-test-welcome-llama/check-span-gvhqx to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:01 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-gvhqx job-controller logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:01 +0000 UTC Normal Pod report-span-kpv5w Scheduled Successfully assigned kuttl-test-welcome-llama/report-span-kpv5w to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:01 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-kpv5w job-controller logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:02 +0000 UTC Normal Pod check-span-gvhqx AddedInterface Add eth0 [10.128.2.47/23] from ovn-kubernetes logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:02 +0000 UTC Normal Pod check-span-gvhqx.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:02 +0000 UTC Normal Pod check-span-gvhqx.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:02 +0000 UTC Normal Pod check-span-gvhqx.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:02 +0000 UTC Normal Pod report-span-kpv5w AddedInterface Add eth0 [10.129.2.88/23] from ovn-kubernetes logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:02 +0000 UTC Normal Pod report-span-kpv5w.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:02 +0000 UTC Normal Pod report-span-kpv5w.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:02 +0000 UTC Normal Pod report-span-kpv5w.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:20:13 | examples-all-in-one-with-options | 2023-10-02 13:20:13 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:20:13 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-welcome-llama === 
CONT kuttl/harness/examples-auto-provision-kafka logger.go:42: 13:20:26 | examples-auto-provision-kafka | Creating namespace: kuttl-test-absolute-airedale logger.go:42: 13:20:26 | examples-auto-provision-kafka/2-install | starting test step 2-install logger.go:42: 13:20:26 | examples-auto-provision-kafka/2-install | Jaeger:kuttl-test-absolute-airedale/auto-provision-kafka created logger.go:42: 13:24:42 | examples-auto-provision-kafka/2-install | test step completed 2-install logger.go:42: 13:24:42 | examples-auto-provision-kafka/3- | starting test step 3- logger.go:42: 13:31:43 | examples-auto-provision-kafka/3- | test step failed 3- case.go:364: failed in step 3- case.go:366: strimzipodsets.core.strimzi.io "auto-provision-kafka-kafka" not found logger.go:42: 13:31:43 | examples-auto-provision-kafka | examples-auto-provision-kafka events from ns kuttl-test-absolute-airedale: logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-1-58df4d97b5 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf replicaset-controller logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-1-58df4d97b5 to 1 deployment-controller logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf Scheduled Successfully assigned kuttl-test-absolute-airedale/elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf AddedInterface Add eth0 [10.129.2.89/23] from ovn-kubernetes logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:31:43 | 
examples-auto-provision-kafka | 2023-10-02 13:20:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:43 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:20:48 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestabsoluteairedaleautoprovision-19pbjf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:21:01 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-zookeeper NoPods No matching pods found controllermanager logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:21:01 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:21:01 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:21:01 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-absolute-airedale/data-auto-provision-kafka-zookeeper-0" logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:21:04 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-bff030d0-56ea-4050-be20-057e7a1856f7 logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:21:05 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 Scheduled Successfully assigned kuttl-test-absolute-airedale/auto-provision-kafka-zookeeper-0 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:22:08 +0000 UTC Warning Pod auto-provision-kafka-zookeeper-0 FailedAttachVolume AttachVolume.Attach failed for volume "pvc-bff030d0-56ea-4050-be20-057e7a1856f7" : rpc error: code = Internal desc = Could not attach volume "vol-03d619334aa39f39e" to node "i-05ba1fef2452c39da": attachment of disk "vol-03d619334aa39f39e" failed, expected device to be attached but was attaching attachdetach-controller logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:23:08 +0000 UTC Warning Pod auto-provision-kafka-zookeeper-0 FailedMount Unable to attach or mount volumes: unmounted volumes=[data], unattached volumes=[data], failed to process volumes=[]: timed out waiting for the condition kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:24:20 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-bff030d0-56ea-4050-be20-057e7a1856f7" attachdetach-controller logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:24:21 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 AddedInterface Add eth0 
[10.131.0.52/23] from ovn-kubernetes logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:24:21 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:24:21 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | 2023-10-02 13:24:21 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 13:31:43 | examples-auto-provision-kafka | Deleting namespace: kuttl-test-absolute-airedale === CONT kuttl/harness/examples-agent-with-priority-class logger.go:42: 13:31:55 | examples-agent-with-priority-class | Creating namespace: kuttl-test-learning-quetzal logger.go:42: 13:31:55 | examples-agent-with-priority-class/0-install | starting test step 0-install logger.go:42: 13:31:55 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 13:31:55 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-learning-quetzal/jaeger-agent-daemonset created logger.go:42: 13:31:55 | examples-agent-with-priority-class/0-install | test step completed 0-install logger.go:42: 13:31:55 | examples-agent-with-priority-class/1-install | starting test step 1-install logger.go:42: 13:31:55 | examples-agent-with-priority-class/1-install | PriorityClass:/high-priority created logger.go:42: 13:31:55 | examples-agent-with-priority-class/1-install | Jaeger:kuttl-test-learning-quetzal/agent-as-daemonset created logger.go:42: 13:32:01 | examples-agent-with-priority-class/1-install | test step completed 1-install logger.go:42: 13:32:01 | examples-agent-with-priority-class/2-smoke-test | starting test step 2-smoke-test logger.go:42: 13:32:01 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 13:32:02 | examples-agent-with-priority-class/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
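Note on the SecurityContextConstraints warnings that follow: the Jaeger agent DaemonSet binds host ports (5775, 5778, 6831, 6832, 14271), which the default restricted-v2 SCC on OpenShift rejects, so step 0-install pre-creates a dedicated daemonset-with-hostport SCC plus a jaeger-agent-daemonset service account. The FailedCreate events below show admission rejecting the DaemonSet pods against every provider, including daemonset-with-hostport itself ("Forbidden: not usable by user or serviceaccount"), which indicates the service account had not been granted that SCC at that point. As a rough sketch only (not the exact manifest this test applies), an SCC that admits those host ports for the service account could look like:

apiVersion: security.openshift.io/v1
kind: SecurityContextConstraints
metadata:
  name: daemonset-with-hostport
# The one capability the agent DaemonSet needs beyond restricted-v2:
allowHostPorts: true
runAsUser:
  type: RunAsAny
seLinuxContext:
  type: RunAsAny
# Granting the SCC directly to the service account; the Forbidden errors
# below suggest this grant (or an equivalent RBAC 'use' rule) was not in
# effect when the daemonset-controller tried to create the pods.
users:
- system:serviceaccount:kuttl-test-learning-quetzal:jaeger-agent-daemonset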
logger.go:42: 13:32:08 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:32:09 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:32:09 | examples-agent-with-priority-class/2-smoke-test | job.batch/report-span created logger.go:42: 13:32:09 | examples-agent-with-priority-class/2-smoke-test | job.batch/check-span created logger.go:42: 13:32:21 | examples-agent-with-priority-class/2-smoke-test | test step completed 2-smoke-test logger.go:42: 13:32:21 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns kuttl-test-learning-quetzal: logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:58 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln Scheduled Successfully assigned kuttl-test-learning-quetzal/agent-as-daemonset-5cdb88d644-z9tln to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:58 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5cdb88d644 SuccessfulCreate Created pod: agent-as-daemonset-5cdb88d644-z9tln replicaset-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:58 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] 
daemonset-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:58 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-5cdb88d644 to 1 deployment-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:59 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln AddedInterface Add eth0 [10.131.0.54/23] from ovn-kubernetes logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:59 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:59 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:59 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:59 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:59 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:31:59 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:03 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:03 +0000 UTC Normal Pod agent-as-daemonset-5cdb88d644-z9tln.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:03 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5cdb88d644 SuccessfulDelete Deleted pod: agent-as-daemonset-5cdb88d644-z9tln replicaset-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:03 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-5cdb88d644 to 0 from 1 deployment-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:04 +0000 UTC Normal Pod agent-as-daemonset-5f58dd5b46-dbhm9 Scheduled Successfully assigned kuttl-test-learning-quetzal/agent-as-daemonset-5f58dd5b46-dbhm9 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:04 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5f58dd5b46 SuccessfulCreate Created pod: agent-as-daemonset-5f58dd5b46-dbhm9 replicaset-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:04 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up 
replica set agent-as-daemonset-5f58dd5b46 to 1 deployment-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:05 +0000 UTC Normal Pod agent-as-daemonset-5f58dd5b46-dbhm9 AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:05 +0000 UTC Normal Pod agent-as-daemonset-5f58dd5b46-dbhm9.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:05 +0000 UTC Normal Pod agent-as-daemonset-5f58dd5b46-dbhm9.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:05 +0000 UTC Normal Pod agent-as-daemonset-5f58dd5b46-dbhm9.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:05 +0000 UTC Normal Pod agent-as-daemonset-5f58dd5b46-dbhm9.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:05 +0000 UTC Normal Pod agent-as-daemonset-5f58dd5b46-dbhm9.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:05 +0000 UTC Normal Pod agent-as-daemonset-5f58dd5b46-dbhm9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:09 +0000 UTC Normal Pod check-span-mj2tk Scheduled Successfully assigned kuttl-test-learning-quetzal/check-span-mj2tk to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:09 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-mj2tk job-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:09 +0000 UTC Normal Pod report-span-wfzw6 Scheduled Successfully assigned kuttl-test-learning-quetzal/report-span-wfzw6 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:09 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-wfzw6 job-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:10 +0000 UTC Normal Pod check-span-mj2tk AddedInterface Add eth0 [10.128.2.48/23] from ovn-kubernetes logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:10 +0000 UTC Normal Pod check-span-mj2tk.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:10 +0000 UTC Normal Pod check-span-mj2tk.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:10 +0000 UTC Normal Pod check-span-mj2tk.spec.containers{asserts-container} Started 
Started container asserts-container kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:10 +0000 UTC Normal Pod report-span-wfzw6 AddedInterface Add eth0 [10.129.2.90/23] from ovn-kubernetes logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:10 +0000 UTC Normal Pod report-span-wfzw6.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:10 +0000 UTC Normal Pod report-span-wfzw6.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:10 +0000 UTC Normal Pod report-span-wfzw6.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:32:21 | examples-agent-with-priority-class | 2023-10-02 13:32:20 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:32:21 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-learning-quetzal === CONT kuttl/harness/examples-agent-as-daemonset logger.go:42: 13:32:34 | examples-agent-as-daemonset | Creating namespace: kuttl-test-needed-cockatoo logger.go:42: 13:32:34 | examples-agent-as-daemonset/0-install | starting test step 0-install logger.go:42: 13:32:34 | examples-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 13:32:34 | examples-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-needed-cockatoo/jaeger-agent-daemonset created logger.go:42: 13:32:34 | examples-agent-as-daemonset/0-install | test step completed 0-install logger.go:42: 13:32:34 | examples-agent-as-daemonset/1-install | starting test step 1-install logger.go:42: 13:32:34 | examples-agent-as-daemonset/1-install | Jaeger:kuttl-test-needed-cockatoo/agent-as-daemonset created logger.go:42: 13:32:41 | examples-agent-as-daemonset/1-install | test step completed 1-install logger.go:42: 13:32:41 | examples-agent-as-daemonset/2-smoke-test | starting test step 2-smoke-test logger.go:42: 13:32:41 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 13:32:42 | examples-agent-as-daemonset/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
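A note on how these smoke tests pass or fail: gomplate renders tests/templates/smoke-test.yaml.template into two Jobs, report-span (which sends a test span to JAEGER_COLLECTOR_ENDPOINT) and check-span (which runs ./query against JAEGER_QUERY_ENDPOINT until the span for smoke-test-service is found), and kuttl then waits for the Jobs to reach the state declared in the step's assert file. Judging from the expected object in the diff below, the assert for this step is essentially a partial Job manifest; a sketch of what such a kuttl assert file plausibly contains (the exact file is not shown in this log):

apiVersion: batch/v1
kind: Job
metadata:
  name: check-span
status:
  succeeded: 1   # kuttl re-checks this until the per-step timeout expires

Because check-span never succeeded here, the step only fails once the harness' per-step timeout elapses (the harness prints 420 seconds when it starts a suite), which matches the gap between the 13:32:49 apply and the 13:39:50 failure below.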
logger.go:42: 13:32:48 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:32:49 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:32:49 | examples-agent-as-daemonset/2-smoke-test | job.batch/report-span created logger.go:42: 13:32:49 | examples-agent-as-daemonset/2-smoke-test | job.batch/check-span created logger.go:42: 13:39:50 | examples-agent-as-daemonset/2-smoke-test | test step failed 2-smoke-test case.go:364: failed in step 2-smoke-test case.go:366:
--- Job:kuttl-test-needed-cockatoo/check-span
+++ Job:kuttl-test-needed-cockatoo/check-span
@@ -1,8 +1,142 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
+  annotations:
+    batch.kubernetes.io/job-tracking: ""
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"check-span","namespace":"kuttl-test-needed-cockatoo"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./query"],"env":[{"name":"SERVICE_NAME","value":"smoke-test-service"},{"name":"QUERY_HOST","value":"https://agent-as-daemonset-query:443"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: 7952651b-ffdf-4e25-bd2d-ee78404ddf65
+    batch.kubernetes.io/job-name: check-span
+    controller-uid: 7952651b-ffdf-4e25-bd2d-ee78404ddf65
+    job-name: check-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:parallelism: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"QUERY_HOST"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2023-10-02T13:32:49Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-10-02T13:38:08Z"
   name: check-span
   namespace: kuttl-test-needed-cockatoo
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  parallelism: 1
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: 7952651b-ffdf-4e25-bd2d-ee78404ddf65
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: 7952651b-ffdf-4e25-bd2d-ee78404ddf65
+        batch.kubernetes.io/job-name: check-span
+        controller-uid: 7952651b-ffdf-4e25-bd2d-ee78404ddf65
+        job-name: check-span
+    spec:
+      containers:
+      - command:
+        - ./query
+        env:
+        - name: SERVICE_NAME
+          value: smoke-test-service
+        - name: QUERY_HOST
+          value: https://agent-as-daemonset-query:443
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678
+        imagePullPolicy: IfNotPresent
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
 status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2023-10-02T13:32:49Z"
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-needed-cockatoo/check-span: .status.succeeded: key is missing from map logger.go:42: 13:39:50 | examples-agent-as-daemonset | examples-agent-as-daemonset events from ns kuttl-test-needed-cockatoo: logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:38 +0000 UTC Normal Pod agent-as-daemonset-d4bdd5b76-8hxfm Scheduled Successfully assigned kuttl-test-needed-cockatoo/agent-as-daemonset-d4bdd5b76-8hxfm to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:38 +0000 UTC Warning Pod agent-as-daemonset-d4bdd5b76-8hxfm FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-collector-tls-config-volume" : secret "agent-as-daemonset-collector-headless-tls" not found kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:38 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-d4bdd5b76 SuccessfulCreate Created pod: agent-as-daemonset-d4bdd5b76-8hxfm replicaset-controller logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:38 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-d4bdd5b76 to 1 deployment-controller logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:39 +0000 UTC Normal Pod agent-as-daemonset-d4bdd5b76-8hxfm AddedInterface Add eth0 [10.129.2.91/23] from ovn-kubernetes logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:39 +0000 UTC Normal Pod agent-as-daemonset-d4bdd5b76-8hxfm.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:39 +0000 UTC Normal Pod
agent-as-daemonset-d4bdd5b76-8hxfm.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:39 +0000 UTC Normal Pod agent-as-daemonset-d4bdd5b76-8hxfm.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:39 +0000 UTC Normal Pod agent-as-daemonset-d4bdd5b76-8hxfm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:39 +0000 UTC Normal Pod agent-as-daemonset-d4bdd5b76-8hxfm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:39 +0000 UTC Normal Pod agent-as-daemonset-d4bdd5b76-8hxfm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:41 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:49 +0000 UTC Normal Pod check-span-2jrt5 Scheduled Successfully assigned kuttl-test-needed-cockatoo/check-span-2jrt5 to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:49 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-2jrt5 job-controller logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:49 +0000 UTC Normal Pod report-span-69j74 Scheduled Successfully assigned 
kuttl-test-needed-cockatoo/report-span-69j74 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:49 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-69j74 job-controller logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:50 +0000 UTC Normal Pod check-span-2jrt5 AddedInterface Add eth0 [10.128.2.49/23] from ovn-kubernetes logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:50 +0000 UTC Normal Pod check-span-2jrt5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:50 +0000 UTC Normal Pod check-span-2jrt5.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:50 +0000 UTC Normal Pod check-span-2jrt5.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:50 +0000 UTC Normal Pod report-span-69j74 AddedInterface Add eth0 [10.131.0.56/23] from ovn-kubernetes logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:50 +0000 UTC Normal Pod report-span-69j74.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:50 +0000 UTC Normal Pod report-span-69j74.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:32:50 +0000 UTC Normal Pod report-span-69j74.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | 2023-10-02 13:37:55 +0000 UTC Warning Pod check-span-2jrt5.spec.containers{asserts-container} BackOff Back-off restarting failed container asserts-container in pod check-span-2jrt5_kuttl-test-needed-cockatoo(b4d2e520-a0a9-47fd-a1a5-526a6c6f0195) kubelet logger.go:42: 13:39:50 | examples-agent-as-daemonset | Deleting namespace: kuttl-test-needed-cockatoo === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- FAIL: kuttl (2257.08s) --- FAIL: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.94s) --- PASS: kuttl/harness/examples-service-types (52.05s) --- PASS: kuttl/harness/examples-with-sampling (58.37s) --- PASS: kuttl/harness/examples-with-cassandra (68.25s) --- FAIL: kuttl/harness/examples-with-badger-and-volume (447.84s) --- PASS: kuttl/harness/examples-with-badger (38.61s) --- PASS: kuttl/harness/examples-simplest (33.01s) --- PASS: kuttl/harness/examples-simple-prod-with-volumes (103.64s) --- PASS: kuttl/harness/examples-simple-prod (67.28s) --- PASS: kuttl/harness/examples-business-application-injected-sidecar (44.52s) --- PASS: kuttl/harness/examples-openshift-with-htpasswd (23.06s) --- PASS: kuttl/harness/examples-openshift-agent-as-daemonset (61.70s) --- PASS: kuttl/harness/examples-collector-with-priority-class (37.78s) --- PASS: kuttl/harness/examples-all-in-one-with-options (38.82s) --- FAIL: 
kuttl/harness/examples-auto-provision-kafka (688.88s) --- PASS: kuttl/harness/examples-agent-with-priority-class (39.15s) --- FAIL: kuttl/harness/examples-agent-as-daemonset (448.13s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml time="2023-10-02T13:40:03Z" level=debug msg="Setting a new name for the test suites" time="2023-10-02T13:40:03Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-10-02T13:40:03Z" level=debug msg="normalizing test case names" time="2023-10-02T13:40:03Z" level=debug msg="examples/artifacts -> examples_artifacts" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-with-badger-and-volume -> examples_examples_with_badger_and_volume" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-openshift-agent-as-daemonset -> examples_examples_openshift_agent_as_daemonset" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-collector-with-priority-class -> examples_examples_collector_with_priority_class" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-auto-provision-kafka -> examples_examples_auto_provision_kafka" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class" time="2023-10-02T13:40:03Z" level=debug msg="examples/examples-agent-as-daemonset -> examples_examples_agent_as_daemonset"
+---------------------------------------------------------+--------+
|                           NAME                          | RESULT |
+---------------------------------------------------------+--------+
| examples_artifacts                                      | passed |
| examples_examples_service_types                         | passed |
| examples_examples_with_sampling                         | passed |
| examples_examples_with_cassandra                        | passed |
| examples_examples_with_badger_and_volume                | failed |
| examples_examples_with_badger                           | passed |
| examples_examples_simplest                              | passed |
| examples_examples_simple_prod_with_volumes              | passed |
| examples_examples_simple_prod                           | passed |
| examples_examples_business_application_injected_sidecar | passed |
| examples_examples_openshift_with_htpasswd               | passed |
| examples_examples_openshift_agent_as_daemonset          | passed |
| examples_examples_collector_with_priority_class         | passed |
| examples_examples_all_in_one_with_options               | passed |
| examples_examples_auto_provision_kafka                  | failed |
| examples_examples_agent_with_priority_class             | passed |
| examples_examples_agent_as_daemonset                    | failed |
+---------------------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 3 -gt 0 ']' + count=2 + '[' 2 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true + '[' 3 -ne 3 ']' + test_suite_name=generate + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/generate.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-generate make[2]: Entering directory '/tmp/jaeger-tests' test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.27.0/operator-sdk_`go env GOOS`_`go env GOARCH` ./hack/install/install-golangci-lint.sh Installing golangci-lint golangci-lint 1.53.2 is installed already ./hack/install/install-goimports.sh Installing goimports Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12 >>>> Formatting code... ./.ci/format.sh >>>> Building... ./hack/install/install-dependencies.sh Installing go dependencies Try 0... go mod download GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.49.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2023-10-02T13:40:05Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.49.0"" -o "bin/jaeger-operator" main.go JAEGER_VERSION="1.49.0" ./tests/e2e/generate/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 66m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 66m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/generate ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 2 -ne 2 ']' + test_name=generate + message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/generate/_build + '[' _build '!=' _build ']' + rm -rf generate + warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m' WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running generate E2E tests' Running generate E2E tests + cd tests/e2e/generate/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3595368357 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 1 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT kuttl/harness/artifacts
logger.go:42: 13:40:22 | artifacts | Creating namespace: kuttl-test-first-snipe
logger.go:42: 13:40:22 | artifacts | artifacts events from ns kuttl-test-first-snipe:
logger.go:42: 13:40:22 | artifacts | Deleting namespace: kuttl-test-first-snipe
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (6.32s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (6.27s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml
time="2023-10-02T13:40:29Z" level=debug msg="Setting a new name for the test suites"
time="2023-10-02T13:40:29Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-10-02T13:40:29Z" level=debug msg="normalizing test case names"
time="2023-10-02T13:40:29Z" level=debug msg="generate/artifacts -> generate_artifacts"
+--------------------+--------+
|        NAME        | RESULT |
+--------------------+--------+
| generate_artifacts | passed |
+--------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ '[' 2 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=miscellaneous
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/miscellaneous.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-miscellaneous
make[2]: Entering directory '/tmp/jaeger-tests'
SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 66m Cluster version is 4.14.0-0.nightly-2023-09-29-231104'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 66m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/miscellaneous/render.sh ++ export SUITE_DIR=./tests/e2e/miscellaneous ++ SUITE_DIR=./tests/e2e/miscellaneous ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test cassandra-spark 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=cassandra-spark + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf cassandra-spark + warning 'cassandra-spark: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: cassandra-spark: Test not supported in OpenShift\e[0m' WAR: cassandra-spark: Test not supported in OpenShift + start_test collector-autoscale + '[' 1 -ne 1 ']' + test_name=collector-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m' Rendering files for test collector-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p collector-autoscale + cd collector-autoscale + jaeger_name=simple-prod + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + ELASTICSEARCH_NODECOUNT=1 + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml + kubectl api-versions + grep autoscaling/v2beta2 -q + rm ./04-assert.yaml + generate_otlp_e2e_tests http + test_protocol=http + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m' Rendering files for test collector-otlp-allinone-http + echo 
=========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale + '[' collector-autoscale '!=' _build ']' + cd .. + mkdir -p collector-otlp-allinone-http + cd collector-otlp-allinone-http + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger http true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m' Rendering files for test collector-otlp-production-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http + '[' collector-otlp-allinone-http '!=' _build ']' + cd .. 
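The collector-autoscale step rendered above patches the generated Jaeger CR with yq and then probes the cluster's autoscaling APIs. A minimal sketch of that pattern, reconstructed from the trace (the branch condition is an assumption; the authoritative logic lives in tests/e2e/miscellaneous/render.sh):

```bash
# Patch autoscaling fields into the rendered install step (as traced above).
yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml
yq e -i '.spec.collector.autoscale=true' 01-install.yaml
yq e -i '.spec.collector.minReplicas=1' 01-install.yaml
yq e -i '.spec.collector.maxReplicas=2' 01-install.yaml

# The trace shows the v2beta2 probe succeeding and 04-assert.yaml being
# removed, which suggests a guard of roughly this shape.
if kubectl api-versions | grep -q autoscaling/v2beta2; then
    rm ./04-assert.yaml
fi
```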
+ mkdir -p collector-otlp-production-http + cd collector-otlp-production-http + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger http true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + generate_otlp_e2e_tests grpc + test_protocol=grpc + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m' Rendering files for test collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http + '[' collector-otlp-production-http '!=' _build ']' + cd .. 
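Each smoke-test render above brackets the gomplate calls with export/unset, so one test's endpoints cannot leak into the next rendered test. The pattern, lifted from the traces (the $TEMPLATES variable stands in for /tmp/jaeger-tests/tests/templates):

```bash
# Endpoints consumed by the smoke-test template via gomplate.
export JAEGER_NAME=my-jaeger
export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318

gomplate -f "$TEMPLATES/openshift/otlp-smoke-test.yaml.template" -o ./02-smoke-test.yaml
gomplate -f "$TEMPLATES/smoke-test-assert.yaml.template" -o ./02-assert.yaml

# Clean up so the next rendered test starts from a blank environment.
unset JAEGER_NAME JAEGER_QUERY_ENDPOINT OTEL_EXPORTER_OTLP_ENDPOINT
```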
+ mkdir -p collector-otlp-allinone-grpc + cd collector-otlp-allinone-grpc + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m' Rendering files for test collector-otlp-production-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc + '[' collector-otlp-allinone-grpc '!=' _build ']' + cd .. 
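The repeated '[' deploy_mode = ... ']' chains above are a mode dispatch inside render_install_jaeger: each deploy mode maps to a pair of gomplate templates that become numbered kuttl install/assert steps. A plausible reconstruction, under the same $TEMPLATES assumption as before (a sketch, not the script's literal source):

```bash
render_install_jaeger() {
    local jaeger_name=$1 deploy_mode=$2 test_step=$3
    export JAEGER_NAME=$jaeger_name
    case $deploy_mode in
        allInOne)
            gomplate -f "$TEMPLATES/allinone-jaeger-install.yaml.template" -o "./$test_step-install.yaml"
            gomplate -f "$TEMPLATES/allinone-jaeger-assert.yaml.template" -o "./$test_step-assert.yaml"
            ;;
        production_autoprovisioned)
            # On OpenShift the Elasticsearch backend is auto-provisioned,
            # hence the openshift-specific install template in the trace.
            gomplate -f "$TEMPLATES/openshift/production-jaeger-autoprovisioned-install.yaml.template" -o "./$test_step-install.yaml"
            gomplate -f "$TEMPLATES/production-jaeger-assert.yaml.template" -o "./$test_step-assert.yaml"
            ;;
    esac
}
```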
+ mkdir -p collector-otlp-production-grpc + cd collector-otlp-production-grpc + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + '[' true = true ']' + skip_test istio 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=istio + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc + '[' collector-otlp-production-grpc '!=' _build ']' + cd .. 
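The four OTLP smoke tests differ only in reporting protocol and deploy mode, and the endpoint wiring above reduces to this selection (reconstructed from the trace; only the secured OpenShift branch appears in this log, so the unsecured branch and its 16686 query port are assumptions):

```bash
# OTLP reporting port: 4317 for grpc, 4318 for http (as in the trace).
reporting_port=:4318
[ "$reporting_protocol" = grpc ] && reporting_port=:4317

if [ "$is_secured" = true ]; then
    # Secured OpenShift route: the query service is reached via https on 443.
    export JAEGER_QUERY_ENDPOINT=https://${jaeger}-query:443
else
    # Assumption: plain clusters would use the default Jaeger query port.
    export JAEGER_QUERY_ENDPOINT=http://${jaeger}-query:16686
fi
export OTEL_EXPORTER_OTLP_ENDPOINT=http://${jaeger}-collector-headless${reporting_port}
```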
+ rm -rf istio + warning 'istio: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m' WAR: istio: Test not supported in OpenShift + '[' true = true ']' + skip_test outside-cluster 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=outside-cluster + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf outside-cluster + warning 'outside-cluster: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m' WAR: outside-cluster: Test not supported in OpenShift + start_test set-custom-img + '[' 1 -ne 1 ']' + test_name=set-custom-img + echo =========================================================================== =========================================================================== + info 'Rendering files for test set-custom-img' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test set-custom-img\e[0m' Rendering files for test set-custom-img + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p set-custom-img + cd set-custom-img + jaeger_name=my-jaeger + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml + '[' true = true ']' + skip_test non-cluster-wide 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=non-cluster-wide + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img + '[' set-custom-img '!=' _build ']' + cd .. + rm -rf non-cluster-wide + warning 'non-cluster-wide: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m' WAR: non-cluster-wide: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running miscellaneous E2E tests' Running miscellaneous E2E tests + cd tests/e2e/miscellaneous/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3595368357 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
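The skip_test helper, used above for cassandra-spark, istio, outside-cluster and non-cluster-wide, simply deletes the rendered test directory from the _build root and logs a yellow warning so the skip stays visible in the CI log. A minimal reconstruction (assumed shape):

```bash
skip_test() {
    local test_name=$1 message=$2
    # Return to the _build root if we are still inside a test directory.
    [ "$(basename "$(pwd)")" != _build ] && cd ..
    # Removing the rendered directory is what makes kuttl skip the test.
    rm -rf "$test_name"
    echo -e "\e[1;33mWAR: $test_name: $message\e[0m"
}
```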
harness.go:275: Successful connection to cluster at: https://api.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 7 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 13:40:39 | artifacts | Creating namespace: kuttl-test-content-bison logger.go:42: 13:40:39 | artifacts | artifacts events from ns kuttl-test-content-bison: logger.go:42: 13:40:39 | artifacts | Deleting namespace: kuttl-test-content-bison === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 13:40:45 | collector-otlp-production-grpc | Creating namespace: kuttl-test-poetic-fowl logger.go:42: 13:40:45 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 13:40:45 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-poetic-fowl/my-jaeger created logger.go:42: 13:41:21 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 13:41:21 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 13:41:21 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:41:22 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
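The kubectl warning in the smoke-test step above is benign: get-token.sh reconfigures a Jaeger resource that the test created imperatively, so the kubectl.kubernetes.io/last-applied-configuration annotation is missing and kubectl patches it in automatically. The general pattern that avoids the warning (jaeger.yaml is a placeholder name for illustration):

```bash
# --save-config writes the last-applied-configuration annotation at
# creation time, so later declarative applies have a baseline to diff.
kubectl create --save-config -f jaeger.yaml
kubectl apply -f jaeger.yaml   # subsequent applies are now warning-free
```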
logger.go:42: 13:41:28 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 13:41:29 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:41:29 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 13:41:29 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 13:41:49 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 13:41:49 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-poetic-fowl: logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf replicaset-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf Scheduled Successfully assigned kuttl-test-poetic-fowl/elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf AddedInterface Add eth0 [10.131.0.57/23] from ovn-kubernetes logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:40:54 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f to 1 deployment-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:04 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:09 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpoeticfowlmyjaeger-1-785676645f452hf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-zsqfh Scheduled Successfully assigned kuttl-test-poetic-fowl/my-jaeger-collector-558ccfc8dd-zsqfh to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-zsqfh AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-zsqfh.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-zsqfh.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-zsqfh.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-zsqfh replicaset-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7 Scheduled Successfully assigned kuttl-test-poetic-fowl/my-jaeger-query-659889fbdb-k57g7 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7 AddedInterface Add eth0 [10.129.2.92/23] from ovn-kubernetes logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 
2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-659889fbdb SuccessfulCreate Created pod: my-jaeger-query-659889fbdb-k57g7 replicaset-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:20 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-659889fbdb to 1 deployment-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:24 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:24 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:24 +0000 UTC Normal Pod my-jaeger-query-659889fbdb-k57g7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:24 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-659889fbdb SuccessfulDelete Deleted pod: my-jaeger-query-659889fbdb-k57g7 replicaset-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:24 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-659889fbdb to 0 from 1 deployment-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn Scheduled Successfully 
assigned kuttl-test-poetic-fowl/my-jaeger-query-58b5fb46d5-7xjrn to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn AddedInterface Add eth0 [10.129.2.93/23] from ovn-kubernetes logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Pod my-jaeger-query-58b5fb46d5-7xjrn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-58b5fb46d5 SuccessfulCreate Created pod: my-jaeger-query-58b5fb46d5-7xjrn replicaset-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:25 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-58b5fb46d5 to 1 deployment-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:29 +0000 UTC Normal Pod check-span-xdggm Scheduled Successfully assigned kuttl-test-poetic-fowl/check-span-xdggm to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:29 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-xdggm job-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:29 +0000 UTC Normal Pod report-span-6w66v 
Scheduled Successfully assigned kuttl-test-poetic-fowl/report-span-6w66v to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:29 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-6w66v job-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:30 +0000 UTC Normal Pod check-span-xdggm AddedInterface Add eth0 [10.129.2.94/23] from ovn-kubernetes logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:30 +0000 UTC Normal Pod check-span-xdggm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:30 +0000 UTC Normal Pod check-span-xdggm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:30 +0000 UTC Normal Pod check-span-xdggm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:30 +0000 UTC Normal Pod report-span-6w66v AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:30 +0000 UTC Normal Pod report-span-6w66v.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:30 +0000 UTC Normal Pod report-span-6w66v.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:30 +0000 UTC Normal Pod report-span-6w66v.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:35 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:35 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:35 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:41:49 | collector-otlp-production-grpc | 2023-10-02 13:41:49 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:41:49 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-poetic-fowl === CONT kuttl/harness/set-custom-img logger.go:42: 13:42:02 | set-custom-img | Ignoring README.md as it does not 
match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:42:02 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:42:02 | set-custom-img | Creating namespace: kuttl-test-in-caribou logger.go:42: 13:42:02 | set-custom-img/1-install | starting test step 1-install logger.go:42: 13:42:02 | set-custom-img/1-install | Jaeger:kuttl-test-in-caribou/my-jaeger created logger.go:42: 13:42:40 | set-custom-img/1-install | test step completed 1-install logger.go:42: 13:42:40 | set-custom-img/2-install | starting test step 2-install logger.go:42: 13:42:40 | set-custom-img/2-install | Jaeger:kuttl-test-in-caribou/my-jaeger updated logger.go:42: 13:42:40 | set-custom-img/2-install | test step completed 2-install logger.go:42: 13:42:40 | set-custom-img/3-check-image | starting test step 3-check-image logger.go:42: 13:42:40 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh] logger.go:42: 13:42:40 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c logger.go:42: 13:42:45 | set-custom-img/3-check-image | Collector image asserted properly! logger.go:42: 13:42:45 | set-custom-img/3-check-image | test step completed 3-check-image logger.go:42: 13:42:45 | set-custom-img | set-custom-img events from ns kuttl-test-in-caribou: logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:09 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestincariboumyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9 to 1 deployment-controller logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv Scheduled Successfully assigned kuttl-test-in-caribou/elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv AddedInterface Add eth0 [10.131.0.58/23] from ovn-kubernetes logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:10 +0000 UTC Normal Pod
elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:10 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv replicaset-controller
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:20 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:25 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestincariboumyjaeger-1-dcbc895f9-ljwwv.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:36 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-564qx Scheduled Successfully assigned kuttl-test-in-caribou/my-jaeger-collector-558ccfc8dd-564qx to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-564qx replicaset-controller
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:36 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:36 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb Scheduled Successfully assigned kuttl-test-in-caribou/my-jaeger-query-764979f4df-ncmrb to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-764979f4df SuccessfulCreate Created pod: my-jaeger-query-764979f4df-ncmrb replicaset-controller
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:36 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-764979f4df to 1 deployment-controller
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-564qx AddedInterface Add eth0 [10.129.2.95/23] from ovn-kubernetes
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-564qx.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-564qx.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-564qx.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb AddedInterface Add eth0 [10.129.2.96/23] from ovn-kubernetes
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:37 +0000 UTC Normal Pod my-jaeger-query-764979f4df-ncmrb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:43 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-564qx.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulDelete Deleted pod: my-jaeger-collector-558ccfc8dd-564qx replicaset-controller
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:43 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-558ccfc8dd to 0 from 1 deployment-controller
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:44 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-7gmkm Scheduled Successfully assigned kuttl-test-in-caribou/my-jaeger-collector-6755b759f8-7gmkm to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:44 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-7gmkm AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:44 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-7gmkm.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:44 +0000 UTC Warning Pod my-jaeger-collector-6755b759f8-7gmkm.spec.containers{jaeger-collector} Failed Failed to pull image "test": rpc error: code = Unknown desc = reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:44 +0000 UTC Warning Pod my-jaeger-collector-6755b759f8-7gmkm.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:44 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-6755b759f8 SuccessfulCreate Created pod: my-jaeger-collector-6755b759f8-7gmkm replicaset-controller
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:44 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-6755b759f8 to 1 deployment-controller
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:45 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-7gmkm.spec.containers{jaeger-collector} BackOff Back-off pulling image "test" kubelet
logger.go:42: 13:42:45 | set-custom-img | 2023-10-02 13:42:45 +0000 UTC Warning Pod my-jaeger-collector-6755b759f8-7gmkm.spec.containers{jaeger-collector} Failed Error: ImagePullBackOff kubelet
logger.go:42: 13:42:45 | set-custom-img | Deleting namespace: kuttl-test-in-caribou
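The ErrImagePull / ImagePullBackOff warnings above are the expected outcome of set-custom-img: the test swaps the collector image for the unpullable reference "test" and passes once the kubelet reports the pull failure. A minimal sketch of the same check, assuming a Jaeger CR named my-jaeger; the label selector and polling loop are assumptions, not taken from the suite:

  # Sketch: point the collector at a deliberately unpullable image...
  kubectl patch jaeger my-jaeger -n "$NAMESPACE" --type=merge \
    -p '{"spec":{"collector":{"image":"test"}}}'
  # ...then poll until the kubelet reports the failed pull.
  until kubectl get pods -n "$NAMESPACE" -l app.kubernetes.io/component=collector \
      -o jsonpath='{.items[*].status.containerStatuses[*].state.waiting.reason}' \
      | grep -Eq 'ErrImagePull|ImagePullBackOff'; do
    sleep 2
  done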
=== CONT kuttl/harness/collector-otlp-production-http
logger.go:42: 13:42:52 | collector-otlp-production-http | Creating namespace: kuttl-test-excited-adder
logger.go:42: 13:42:52 | collector-otlp-production-http/1-install | starting test step 1-install
logger.go:42: 13:42:52 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-excited-adder/my-jaeger created
logger.go:42: 13:43:28 | collector-otlp-production-http/1-install | test step completed 1-install
logger.go:42: 13:43:28 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 13:43:28 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 13:43:29 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:43:36 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 13:43:36 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:43:37 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created
logger.go:42: 13:43:37 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created
logger.go:42: 13:43:49 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 13:43:49 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-excited-adder:
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:42:59 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d55f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb replicaset-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:42:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb Scheduled Successfully assigned kuttl-test-excited-adder/elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:42:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb AddedInterface Add eth0 [10.131.0.59/23] from ovn-kubernetes
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:42:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:42:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:42:59 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d55f to 1 deployment-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:09 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:15 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestexcitedaddermyjaeger-1-6588d8d5qd9vb.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-4w4s6 Scheduled Successfully assigned kuttl-test-excited-adder/my-jaeger-collector-558ccfc8dd-4w4s6 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-4w4s6 AddedInterface Add eth0 [10.129.2.97/23] from ovn-kubernetes
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-4w4s6.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-4w4s6.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-4w4s6.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-4w4s6 replicaset-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n Scheduled Successfully assigned kuttl-test-excited-adder/my-jaeger-query-6f67d4c44d-b6d6n to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n AddedInterface Add eth0 [10.129.2.98/23] from ovn-kubernetes
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6f67d4c44d SuccessfulCreate Created pod: my-jaeger-query-6f67d4c44d-b6d6n replicaset-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:26 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6f67d4c44d to 1 deployment-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:27 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:27 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:27 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:27 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:27 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:27 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:27 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:27 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:27 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:32 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:32 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:32 +0000 UTC Normal Pod my-jaeger-query-6f67d4c44d-b6d6n.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:32 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6f67d4c44d SuccessfulDelete Deleted pod: my-jaeger-query-6f67d4c44d-b6d6n replicaset-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:32 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-6f67d4c44d to 0 from 1 deployment-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:33 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl Scheduled Successfully assigned kuttl-test-excited-adder/my-jaeger-query-9b555db8c-96xnl to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:33 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-9b555db8c SuccessfulCreate Created pod: my-jaeger-query-9b555db8c-96xnl replicaset-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:33 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-9b555db8c to 1 deployment-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:34 +0000 UTC Normal Pod my-jaeger-query-9b555db8c-96xnl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod check-span-d77fg Scheduled Successfully assigned kuttl-test-excited-adder/check-span-d77fg to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod check-span-d77fg AddedInterface Add eth0 [10.129.2.100/23] from ovn-kubernetes
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod check-span-d77fg.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod check-span-d77fg.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod check-span-d77fg.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-d77fg job-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod report-span-qch26 Scheduled Successfully assigned kuttl-test-excited-adder/report-span-qch26 to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod report-span-qch26 AddedInterface Add eth0 [10.129.2.99/23] from ovn-kubernetes
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod report-span-qch26.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod report-span-qch26.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Pod report-span-qch26.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:37 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-qch26 job-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:44 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:44 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:44 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:43:49 | collector-otlp-production-http | 2023-10-02 13:43:48 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:43:49 | collector-otlp-production-http | Deleting namespace: kuttl-test-excited-adder
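Both production smoke tests follow the same pattern: gomplate renders otlp-smoke-test.yaml.template into a report-span Job that pushes spans at OTEL_EXPORTER_OTLP_ENDPOINT and a check-span Job that polls JAEGER_QUERY_ENDPOINT until the spans are visible. The HTTP variant can be reproduced by hand with a raw OTLP/HTTP request against the endpoints shown above; a sketch, with the payload trimmed to the minimum, arbitrary hex IDs, and $TOKEN assumed to hold the bearer token fetched by get-token.sh:

  # Sketch: report one span over OTLP/HTTP (port 4318)...
  curl -s -X POST http://my-jaeger-collector-headless:4318/v1/traces \
    -H 'Content-Type: application/json' \
    -d '{"resourceSpans":[{"resource":{"attributes":[{"key":"service.name","value":{"stringValue":"smoke"}}]},"scopeSpans":[{"spans":[{"traceId":"5b8efff798038103d269b633813fc60c","spanId":"eee19b7ec3c1b174","name":"ping","kind":1,"startTimeUnixNano":"1700000000000000000","endTimeUnixNano":"1700000001000000000"}]}]}]}'
  # ...then ask the protected query service whether the trace arrived.
  curl -sk -H "Authorization: Bearer $TOKEN" \
    "https://my-jaeger-query:443/api/traces?service=smoke"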
=== CONT kuttl/harness/collector-otlp-allinone-grpc
logger.go:42: 13:43:56 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-set-mollusk
logger.go:42: 13:43:56 | collector-otlp-allinone-grpc/0-install | starting test step 0-install
logger.go:42: 13:43:56 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-set-mollusk/my-jaeger created
logger.go:42: 13:44:03 | collector-otlp-allinone-grpc/0-install | test step completed 0-install
logger.go:42: 13:44:03 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 13:44:03 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 13:44:04 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:44:10 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 13:44:10 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:44:11 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created
logger.go:42: 13:44:11 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-set-mollusk:
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:00 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f Scheduled Successfully assigned kuttl-test-set-mollusk/my-jaeger-666f89667f-v2r8f to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:00 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f AddedInterface Add eth0 [10.129.2.101/23] from ovn-kubernetes
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:00 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:00 +0000 UTC Normal ReplicaSet.apps my-jaeger-666f89667f SuccessfulCreate Created pod: my-jaeger-666f89667f-v2r8f replicaset-controller
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:00 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-666f89667f to 1 deployment-controller
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:01 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:01 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:01 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:01 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:01 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:04 +0000 UTC Warning Pod my-jaeger-666f89667f-v2r8f FailedMount MountVolume.SetUp failed for volume "my-jaeger-service-ca" : configmap references non-existent config key: service-ca.crt kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:07 +0000 UTC Normal Pod my-jaeger-597bf9b476-szjww Scheduled Successfully assigned kuttl-test-set-mollusk/my-jaeger-597bf9b476-szjww to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:07 +0000 UTC Normal ReplicaSet.apps my-jaeger-597bf9b476 SuccessfulCreate Created pod: my-jaeger-597bf9b476-szjww replicaset-controller
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:07 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:07 +0000 UTC Normal Pod my-jaeger-666f89667f-v2r8f.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:07 +0000 UTC Normal ReplicaSet.apps my-jaeger-666f89667f SuccessfulDelete Deleted pod: my-jaeger-666f89667f-v2r8f replicaset-controller
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:07 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-666f89667f to 0 from 1 deployment-controller
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:07 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-597bf9b476 to 1 deployment-controller
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:08 +0000 UTC Normal Pod my-jaeger-597bf9b476-szjww AddedInterface Add eth0 [10.129.2.102/23] from ovn-kubernetes
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:08 +0000 UTC Normal Pod my-jaeger-597bf9b476-szjww.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:08 +0000 UTC Normal Pod my-jaeger-597bf9b476-szjww.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:08 +0000 UTC Normal Pod my-jaeger-597bf9b476-szjww.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:08 +0000 UTC Normal Pod my-jaeger-597bf9b476-szjww.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:08 +0000 UTC Normal Pod my-jaeger-597bf9b476-szjww.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:08 +0000 UTC Normal Pod my-jaeger-597bf9b476-szjww.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod check-span-sh92j Scheduled Successfully assigned kuttl-test-set-mollusk/check-span-sh92j to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod check-span-sh92j AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod check-span-sh92j.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod check-span-sh92j.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod check-span-sh92j.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-sh92j job-controller
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod report-span-rnjjr Scheduled Successfully assigned kuttl-test-set-mollusk/report-span-rnjjr to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod report-span-rnjjr AddedInterface Add eth0 [10.131.0.60/23] from ovn-kubernetes
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod report-span-rnjjr.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod report-span-rnjjr.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Pod report-span-rnjjr.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:11 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-rnjjr job-controller
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | 2023-10-02 13:44:30 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:44:31 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-set-mollusk
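The FailedMount warning for "my-jaeger-service-ca" above is usually transient: the pod mounts the configmap before OpenShift's service-ca operator has injected the service-ca.crt key, and the mount succeeds on retry once the key appears, which is why the test still passes. A quick way to see whether the key has been injected yet (a sketch; the jsonpath escaping of the dotted key is the only subtlety):

  # Sketch: has the service-ca operator injected the CA into the configmap yet?
  kubectl get configmap my-jaeger-service-ca -n "$NAMESPACE" \
    -o jsonpath='{.data.service-ca\.crt}' | head -c 64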
=== CONT kuttl/harness/collector-otlp-allinone-http
logger.go:42: 13:44:38 | collector-otlp-allinone-http | Creating namespace: kuttl-test-striking-bulldog
logger.go:42: 13:44:38 | collector-otlp-allinone-http/0-install | starting test step 0-install
logger.go:42: 13:44:38 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-striking-bulldog/my-jaeger created
logger.go:42: 13:44:44 | collector-otlp-allinone-http/0-install | test step completed 0-install
logger.go:42: 13:44:44 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 13:44:44 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 13:44:46 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:44:52 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 13:44:52 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:44:53 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created
logger.go:42: 13:44:53 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created
logger.go:42: 13:45:05 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 13:45:05 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-striking-bulldog:
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:42 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl Scheduled Successfully assigned kuttl-test-striking-bulldog/my-jaeger-8656d56f76-fznvl to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:42 +0000 UTC Normal ReplicaSet.apps my-jaeger-8656d56f76 SuccessfulCreate Created pod: my-jaeger-8656d56f76-fznvl replicaset-controller
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:42 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-8656d56f76 to 1 deployment-controller
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:43 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl AddedInterface Add eth0 [10.129.2.103/23] from ovn-kubernetes
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:43 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:43 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:43 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:43 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:43 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:43 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:47 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:47 +0000 UTC Normal Pod my-jaeger-8656d56f76-fznvl.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:47 +0000 UTC Normal ReplicaSet.apps my-jaeger-8656d56f76 SuccessfulDelete Deleted pod: my-jaeger-8656d56f76-fznvl replicaset-controller
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:47 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-8656d56f76 to 0 from 1 deployment-controller
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:48 +0000 UTC Normal Pod my-jaeger-66779746f4-cmgkb Scheduled Successfully assigned kuttl-test-striking-bulldog/my-jaeger-66779746f4-cmgkb to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-66779746f4 SuccessfulCreate Created pod: my-jaeger-66779746f4-cmgkb replicaset-controller
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:48 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-66779746f4 to 1 deployment-controller
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:49 +0000 UTC Normal Pod my-jaeger-66779746f4-cmgkb AddedInterface Add eth0 [10.129.2.104/23] from ovn-kubernetes
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:49 +0000 UTC Normal Pod my-jaeger-66779746f4-cmgkb.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:49 +0000 UTC Normal Pod my-jaeger-66779746f4-cmgkb.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:49 +0000 UTC Normal Pod my-jaeger-66779746f4-cmgkb.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:49 +0000 UTC Normal Pod my-jaeger-66779746f4-cmgkb.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:49 +0000 UTC Normal Pod my-jaeger-66779746f4-cmgkb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:49 +0000 UTC Normal Pod my-jaeger-66779746f4-cmgkb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:49 +0000 UTC Warning Pod my-jaeger-66779746f4-cmgkb FailedMount MountVolume.SetUp failed for volume "my-jaeger-service-ca" : configmap references non-existent config key: service-ca.crt kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod check-span-czh75 Scheduled Successfully assigned kuttl-test-striking-bulldog/check-span-czh75 to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod check-span-czh75 AddedInterface Add eth0 [10.128.2.55/23] from ovn-kubernetes
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod check-span-czh75.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod check-span-czh75.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod check-span-czh75.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-czh75 job-controller
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod report-span-tlrrj Scheduled Successfully assigned kuttl-test-striking-bulldog/report-span-tlrrj to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod report-span-tlrrj AddedInterface Add eth0 [10.131.0.61/23] from ovn-kubernetes
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod report-span-tlrrj.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod report-span-tlrrj.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Pod report-span-tlrrj.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:44:53 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-tlrrj job-controller
logger.go:42: 13:45:05 | collector-otlp-allinone-http | 2023-10-02 13:45:04 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:45:05 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-striking-bulldog
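Each smoke test is gated on the check-span Job reaching Completed, as in the events above; kuttl encodes that in an assert step, and the same gate can be expressed directly with kubectl (a sketch, reusing the suite's 5m timeout budget):

  # Sketch: block until the assert job finishes.
  kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=5m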
"registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:23 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcapitaldoesimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcapitaldoesimpleprod-1-5548cc6fb5 to 1 deployment-controller logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitaldoesimpleprod-1-5548cc6fg5lkg.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitaldoesimpleprod-1-5548cc6fg5lkg.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:33 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcapitaldoesimpleprod-1-5548cc6fg5lkg.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:39 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcapitaldoesimpleprod-1-5548cc6fg5lkg.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-242h5 Scheduled Successfully assigned kuttl-test-capital-doe/simple-prod-collector-b86d94b64-242h5 to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-242h5 AddedInterface Add eth0 [10.128.2.56/23] from ovn-kubernetes logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-242h5.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-242h5.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-242h5.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-b86d94b64 SuccessfulCreate Created pod: simple-prod-collector-b86d94b64-242h5 replicaset-controller logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-b86d94b64 to 1 deployment-controller logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln Scheduled Successfully assigned kuttl-test-capital-doe/simple-prod-query-685cbc8576-vrqln to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln AddedInterface Add eth0 [10.129.2.105/23] from ovn-kubernetes logger.go:42: 13:45:54 | 
collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal ReplicaSet.apps simple-prod-query-685cbc8576 SuccessfulCreate Created pod: simple-prod-query-685cbc8576-vrqln replicaset-controller logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:50 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-685cbc8576 to 1 deployment-controller logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:51 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:51 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:51 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:51 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:45:54 | collector-autoscale | 2023-10-02 13:45:51 +0000 UTC Normal Pod simple-prod-query-685cbc8576-vrqln.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:45:54 | collector-autoscale | Deleting namespace: kuttl-test-capital-doe === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (321.06s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.96s) --- PASS: kuttl/harness/collector-otlp-production-grpc (76.74s) --- PASS: kuttl/harness/set-custom-img (49.49s) --- PASS: kuttl/harness/collector-otlp-production-http (64.82s) --- PASS: kuttl/harness/collector-otlp-allinone-grpc (41.94s) --- PASS: kuttl/harness/collector-otlp-allinone-http (38.79s) --- PASS: kuttl/harness/collector-autoscale (43.28s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml time="2023-10-02T13:46:01Z" level=debug msg="Setting a new name for the 
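The single "Some HPA metrics are not known yet" line is wait-for-hpa.sh tolerating the FailedGetResourceMetric warnings seen in the earlier suites: right after a deployment the resource metrics API has no samples for the collector, so the HPA briefly reports unknown utilization. A sketch of such a polling loop, with the HPA name taken from the simple-prod deployment above; the loop body is an assumption, not the script itself:

  # Sketch: retry until the HPA stops reporting <unknown> targets.
  while kubectl get hpa simple-prod-collector -n "$NAMESPACE" | grep -q '<unknown>'; do
    echo "Some HPA metrics are not known yet"
    sleep 5
  done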
test suites" time="2023-10-02T13:46:01Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-10-02T13:46:01Z" level=debug msg="normalizing test case names" time="2023-10-02T13:46:01Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts" time="2023-10-02T13:46:01Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc" time="2023-10-02T13:46:01Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img" time="2023-10-02T13:46:01Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http" time="2023-10-02T13:46:01Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc" time="2023-10-02T13:46:01Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http" time="2023-10-02T13:46:01Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale" +----------------------------------------------+--------+ | NAME | RESULT | +----------------------------------------------+--------+ | miscellaneous_artifacts | passed | | miscellaneous_collector_otlp_production_grpc | passed | | miscellaneous_set_custom_img | passed | | miscellaneous_collector_otlp_production_http | passed | | miscellaneous_collector_otlp_allinone_grpc | passed | | miscellaneous_collector_otlp_allinone_http | passed | | miscellaneous_collector_autoscale | passed | +----------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 3 -gt 0 ']' + count=2 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + '[' 2 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true + '[' 3 -ne 3 ']' + test_suite_name=sidecar + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/sidecar.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-sidecar make[2]: Entering directory '/tmp/jaeger-tests' ./tests/e2e/sidecar/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 71m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ++ IS_OPENSHIFT=false ++ '[' '!' 
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=sidecar
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/sidecar.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-sidecar
make[2]: Entering directory '/tmp/jaeger-tests'
./tests/e2e/sidecar/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 71m Cluster version is 4.14.0-0.nightly-2023-09-29-231104'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 71m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/sidecar/render.sh
++ export SUITE_DIR=./tests/e2e/sidecar
++ SUITE_DIR=./tests/e2e/sidecar
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/sidecar
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
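As the ++ trace above shows, render.sh decides between plain Kubernetes and OpenShift templates by probing for the OpenShift-only clusterversion resource: non-empty output flips IS_OPENSHIFT to true and prints the WAR line. The detection reduces to this sketch (the stderr redirect is an assumption; the trace does not show how errors are handled):

  # Sketch of the traced OpenShift detection.
  IS_OPENSHIFT=false
  output=$(kubectl get clusterversion 2>/dev/null)
  if [ ! -z "$output" ]; then
    echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
    IS_OPENSHIFT=true
  fi
  export IS_OPENSHIFT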
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ jaeger_service_name=order
+ start_test sidecar-deployment
+ '[' 1 -ne 1 ']'
+ test_name=sidecar-deployment
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test sidecar-deployment'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m'
Rendering files for test sidecar-deployment
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build
+ '[' _build '!=' _build ']'
+ mkdir -p sidecar-deployment
+ cd sidecar-deployment
+ render_install_vertx 01
+ '[' 1 -ne 1 ']'
+ test_step=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml
+ render_find_service agent-as-sidecar allInOne order 00 03
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-sidecar
+ deployment_strategy=allInOne
+ service_name=order
+ job_number=00
+ test_step=03
+ export JAEGER_NAME=agent-as-sidecar
+ JAEGER_NAME=agent-as-sidecar
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' allInOne '!=' allInOne ']'
+ template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ render_find_service agent-as-sidecar2 allInOne order 01 06
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-sidecar2
+ deployment_strategy=allInOne
+ service_name=order
+ job_number=01
+ test_step=06
+ export JAEGER_NAME=agent-as-sidecar2
+ JAEGER_NAME=agent-as-sidecar2
+ export JOB_NUMBER=01
+ JOB_NUMBER=01
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' allInOne '!=' allInOne ']'
+ template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test sidecar-namespace
+ '[' 1 -ne 1 ']'
+ test_name=sidecar-namespace
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test sidecar-namespace'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m'
Rendering files for test sidecar-namespace
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment
+ '[' sidecar-deployment '!=' _build ']'
+ cd ..
+ mkdir -p sidecar-namespace
+ cd sidecar-namespace
+ render_install_vertx 01
+ '[' 1 -ne 1 ']'
+ test_step=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml
+ render_find_service agent-as-sidecar allInOne order 00 03
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-sidecar
+ deployment_strategy=allInOne
+ service_name=order
+ job_number=00
+ test_step=03
+ export JAEGER_NAME=agent-as-sidecar
+ JAEGER_NAME=agent-as-sidecar
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' allInOne '!=' allInOne ']'
+ template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ render_find_service agent-as-sidecar2 allInOne order 01 06
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-sidecar2
+ deployment_strategy=allInOne
+ service_name=order
+ job_number=01
+ test_step=06
+ export JAEGER_NAME=agent-as-sidecar2
+ JAEGER_NAME=agent-as-sidecar2
+ export JOB_NUMBER=01
+ JOB_NUMBER=01
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' allInOne '!=' allInOne ']'
+ template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test sidecar-skip-webhook
+ '[' 1 -ne 1 ']'
+ test_name=sidecar-skip-webhook
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test sidecar-skip-webhook'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m'
Rendering files for test sidecar-skip-webhook
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace
+ '[' sidecar-namespace '!=' _build ']'
+ cd ..
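All render_find_service invocations traced above follow one pattern: the positional arguments become exported environment variables, gomplate renders the shared find-service templates against them, and the variables are unset so the next test starts clean. A sketch reconstructed from the trace (TEMPLATES_DIR stands in for the literal /tmp/jaeger-tests/tests/templates path; the non-allInOne branch behind the guards is omitted):

  render_find_service() {
      jaeger=$1; deployment_strategy=$2; service_name=$3; job_number=$4; test_step=$5
      export JAEGER_NAME="$jaeger"
      export JOB_NUMBER="$job_number"
      export SERVICE_NAME="$service_name"
      # allInOne instances expose the query service on <name>-query:16686
      export JAEGER_QUERY_ENDPOINT="http://${jaeger}-query:16686"
      gomplate -f "$TEMPLATES_DIR/find-service.yaml.template" -o "./${test_step}-find-service.yaml"
      gomplate -f "$TEMPLATES_DIR/assert-find-service.yaml.template" -o "./${test_step}-assert.yaml"
      unset JAEGER_NAME SERVICE_NAME JOB_NUMBER JAEGER_COLLECTOR_ENDPOINT
  }

  # e.g. the second invocation in the trace above:
  render_find_service agent-as-sidecar2 allInOne order 01 06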
+ mkdir -p sidecar-skip-webhook + cd sidecar-skip-webhook + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running sidecar E2E tests' Running sidecar E2E tests + cd tests/e2e/sidecar/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3595368357 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/sidecar-deployment === PAUSE kuttl/harness/sidecar-deployment === RUN kuttl/harness/sidecar-namespace === PAUSE kuttl/harness/sidecar-namespace === RUN kuttl/harness/sidecar-skip-webhook === PAUSE kuttl/harness/sidecar-skip-webhook === CONT kuttl/harness/artifacts logger.go:42: 13:46:09 | artifacts | Creating namespace: kuttl-test-fitting-cockatoo logger.go:42: 13:46:09 | artifacts | artifacts events from ns kuttl-test-fitting-cockatoo: logger.go:42: 13:46:09 | artifacts | Deleting namespace: kuttl-test-fitting-cockatoo === CONT kuttl/harness/sidecar-namespace logger.go:42: 13:46:15 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:46:15 | sidecar-namespace | Creating namespace: kuttl-test-notable-seal logger.go:42: 13:46:15 | sidecar-namespace/0-install | starting test step 0-install logger.go:42: 13:46:15 | sidecar-namespace/0-install | Jaeger:kuttl-test-notable-seal/agent-as-sidecar created logger.go:42: 13:46:21 | sidecar-namespace/0-install | test step completed 0-install logger.go:42: 13:46:21 | sidecar-namespace/1-install | starting test step 1-install logger.go:42: 13:46:21 | sidecar-namespace/1-install | Deployment:kuttl-test-notable-seal/vertx-create-span-sidecar created logger.go:42: 13:46:23 | sidecar-namespace/1-install | test step completed 1-install logger.go:42: 13:46:23 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection logger.go:42: 13:46:23 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"] logger.go:42: 13:46:23 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-notable-seal annotated logger.go:42: 13:46:25 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection logger.go:42: 13:46:25 | sidecar-namespace/3-find-service | starting test step 3-find-service logger.go:42: 13:46:25 | sidecar-namespace/3-find-service | Job:kuttl-test-notable-seal/00-find-service created logger.go:42: 13:46:37 | sidecar-namespace/3-find-service | test step completed 3-find-service logger.go:42: 13:46:37 | sidecar-namespace/4-other-instance | starting test step 4-other-instance logger.go:42: 13:46:37 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-notable-seal/agent-as-sidecar2 created logger.go:42: 13:46:46 | sidecar-namespace/4-other-instance | test step completed 
4-other-instance logger.go:42: 13:46:46 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 13:46:47 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 13:46:47 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 13:46:47 | sidecar-namespace/6-find-service | Job:kuttl-test-notable-seal/01-find-service created logger.go:42: 13:47:07 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 13:47:07 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 13:47:07 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 13:47:07 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-notable-seal annotated logger.go:42: 13:47:09 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 13:47:10 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-notable-seal: logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:19 +0000 UTC Normal Pod agent-as-sidecar-967f5786c-9v4cl Scheduled Successfully assigned kuttl-test-notable-seal/agent-as-sidecar-967f5786c-9v4cl to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:19 +0000 UTC Normal Pod agent-as-sidecar-967f5786c-9v4cl AddedInterface Add eth0 [10.131.0.64/23] from ovn-kubernetes logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:19 +0000 UTC Normal Pod agent-as-sidecar-967f5786c-9v4cl.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:19 +0000 UTC Normal Pod agent-as-sidecar-967f5786c-9v4cl.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:19 +0000 UTC Normal Pod agent-as-sidecar-967f5786c-9v4cl.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:19 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-967f5786c SuccessfulCreate Created pod: agent-as-sidecar-967f5786c-9v4cl replicaset-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:19 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-967f5786c to 1 deployment-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:21 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-56xz4 Scheduled Successfully assigned kuttl-test-notable-seal/vertx-create-span-sidecar-568b7c9f6f-56xz4 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:21 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulCreate Created pod: vertx-create-span-sidecar-568b7c9f6f-56xz4 replicaset-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:21 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-568b7c9f6f to 1 deployment-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:22 +0000 UTC Normal Pod 
vertx-create-span-sidecar-568b7c9f6f-56xz4 AddedInterface Add eth0 [10.129.2.106/23] from ovn-kubernetes logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:22 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-56xz4.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:22 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-56xz4.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:22 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-56xz4.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:23 +0000 UTC Normal Pod vertx-create-span-sidecar-5c68d998f6-gzj9n Scheduled Successfully assigned kuttl-test-notable-seal/vertx-create-span-sidecar-5c68d998f6-gzj9n to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:23 +0000 UTC Normal Pod vertx-create-span-sidecar-5c68d998f6-gzj9n AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:23 +0000 UTC Normal Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:23 +0000 UTC Normal Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:23 +0000 UTC Normal Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:23 +0000 UTC Normal Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:23 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5c68d998f6 SuccessfulCreate Created pod: vertx-create-span-sidecar-5c68d998f6-gzj9n replicaset-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:23 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-5c68d998f6 to 1 deployment-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:24 +0000 UTC Normal Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:24 +0000 UTC Normal Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:25 +0000 UTC Normal Pod 00-find-service-l5jmk Scheduled Successfully assigned kuttl-test-notable-seal/00-find-service-l5jmk to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:47:10 | sidecar-namespace | 
2023-10-02 13:46:25 +0000 UTC Normal Pod 00-find-service-l5jmk AddedInterface Add eth0 [10.129.2.107/23] from ovn-kubernetes logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:25 +0000 UTC Normal Pod 00-find-service-l5jmk.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:25 +0000 UTC Normal Pod 00-find-service-l5jmk.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:25 +0000 UTC Normal Pod 00-find-service-l5jmk.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:25 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-l5jmk job-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:30 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-56xz4.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.106:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:30 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-56xz4.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.106:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:31 +0000 UTC Warning Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.57:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:31 +0000 UTC Warning Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.57:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:32 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-56xz4.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:32 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-56xz4.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.106:8080/": read tcp 10.129.2.2:35316->10.129.2.106:8080: read: connection reset by peer kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:32 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-56xz4.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.106:8080/": dial tcp 10.129.2.106:8080: connect: connection refused kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:34 +0000 UTC Warning 
Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.57:8080/": read tcp 10.128.2.2:38812->10.128.2.57:8080: read: connection reset by peer kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:34 +0000 UTC Warning Pod vertx-create-span-sidecar-5c68d998f6-gzj9n.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.57:8080/": dial tcp 10.128.2.57:8080: connect: connection refused kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:37 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:43 +0000 UTC Normal Pod agent-as-sidecar2-699ffdb56c-sr4w6 Scheduled Successfully assigned kuttl-test-notable-seal/agent-as-sidecar2-699ffdb56c-sr4w6 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:43 +0000 UTC Normal Pod agent-as-sidecar2-699ffdb56c-sr4w6 AddedInterface Add eth0 [10.129.2.108/23] from ovn-kubernetes logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:43 +0000 UTC Normal Pod agent-as-sidecar2-699ffdb56c-sr4w6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:43 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-699ffdb56c SuccessfulCreate Created pod: agent-as-sidecar2-699ffdb56c-sr4w6 replicaset-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:43 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-699ffdb56c to 1 deployment-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:43 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-56xz4.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.106:8080/": read tcp 10.129.2.2:51192->10.129.2.106:8080: read: connection reset by peer kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:44 +0000 UTC Normal Pod agent-as-sidecar2-699ffdb56c-sr4w6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:44 +0000 UTC Normal Pod agent-as-sidecar2-699ffdb56c-sr4w6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:46 +0000 UTC Normal Pod agent-as-sidecar-967f5786c-9v4cl.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:47 +0000 UTC Normal Pod 01-find-service-48x5s Scheduled Successfully assigned kuttl-test-notable-seal/01-find-service-48x5s to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:47 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-48x5s job-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:48 +0000 UTC Normal Pod 01-find-service-48x5s AddedInterface Add eth0 [10.131.0.65/23] from ovn-kubernetes logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:48 +0000 UTC Normal Pod 01-find-service-48x5s.spec.containers{asserts-container} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:48 +0000 UTC Normal Pod 01-find-service-48x5s.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:48 +0000 UTC Normal Pod 01-find-service-48x5s.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-568b7c9f6f-56xz4 replicaset-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Pod vertx-create-span-sidecar-594589bf74-5jcwb Scheduled Successfully assigned kuttl-test-notable-seal/vertx-create-span-sidecar-594589bf74-5jcwb to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Pod vertx-create-span-sidecar-594589bf74-5jcwb AddedInterface Add eth0 [10.131.0.66/23] from ovn-kubernetes logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-594589bf74 SuccessfulCreate Created pod: vertx-create-span-sidecar-594589bf74-5jcwb replicaset-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-568b7c9f6f to 0 from 1 deployment-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:51 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-594589bf74 to 1 from 0 deployment-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:59 +0000 UTC Warning Pod 
vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.66:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:46:59 +0000 UTC Warning Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.66:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:01 +0000 UTC Normal Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:02 +0000 UTC Warning Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.66:8080/": read tcp 10.131.0.2:32928->10.131.0.66:8080: read: connection reset by peer kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:02 +0000 UTC Warning Pod vertx-create-span-sidecar-594589bf74-5jcwb.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.66:8080/": dial tcp 10.131.0.66:8080: connect: connection refused kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:07 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:07 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5c68d998f6 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-5c68d998f6-gzj9n replicaset-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:07 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-5c68d998f6 to 0 from 1 deployment-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:07 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-745ff89d9f to 1 from 0 deployment-controller logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:08 +0000 UTC Normal Pod vertx-create-span-sidecar-745ff89d9f-d4plr Scheduled Successfully assigned kuttl-test-notable-seal/vertx-create-span-sidecar-745ff89d9f-d4plr to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:08 +0000 UTC Normal Pod vertx-create-span-sidecar-745ff89d9f-d4plr AddedInterface Add eth0 [10.129.2.109/23] from ovn-kubernetes logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:08 +0000 UTC Normal Pod vertx-create-span-sidecar-745ff89d9f-d4plr.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:08 +0000 UTC Normal Pod vertx-create-span-sidecar-745ff89d9f-d4plr.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:47:10 | sidecar-namespace | 2023-10-02 13:47:08 +0000 UTC Normal Pod vertx-create-span-sidecar-745ff89d9f-d4plr.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 13:47:10 | sidecar-namespace 
| 2023-10-02 13:47:08 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-745ff89d9f SuccessfulCreate Created pod: vertx-create-span-sidecar-745ff89d9f-d4plr replicaset-controller logger.go:42: 13:47:10 | sidecar-namespace | Deleting namespace: kuttl-test-notable-seal === CONT kuttl/harness/sidecar-skip-webhook logger.go:42: 13:47:16 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:47:16 | sidecar-skip-webhook | Creating namespace: kuttl-test-immune-narwhal logger.go:42: 13:47:17 | sidecar-skip-webhook/0-install | starting test step 0-install logger.go:42: 13:47:17 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-immune-narwhal/agent-as-sidecar created logger.go:42: 13:47:24 | sidecar-skip-webhook/0-install | test step completed 0-install logger.go:42: 13:47:24 | sidecar-skip-webhook/1-install | starting test step 1-install logger.go:42: 13:47:24 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-immune-narwhal/vertx-create-span-sidecar created logger.go:42: 13:47:26 | sidecar-skip-webhook/1-install | test step completed 1-install logger.go:42: 13:47:26 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label logger.go:42: 13:47:26 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-immune-narwhal] logger.go:42: 13:47:26 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled logger.go:42: 13:47:26 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-immune-narwhal] logger.go:42: 13:47:26 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 13:47:26 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label logger.go:42: 13:47:26 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label logger.go:42: 13:47:26 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-immune-narwhal] logger.go:42: 13:47:26 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled logger.go:42: 13:47:28 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label logger.go:42: 13:47:28 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-immune-narwhal: logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:20 +0000 UTC Normal Pod agent-as-sidecar-7bdb7f97c-f56fp Scheduled Successfully assigned kuttl-test-immune-narwhal/agent-as-sidecar-7bdb7f97c-f56fp to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:20 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-7bdb7f97c SuccessfulCreate Created pod: agent-as-sidecar-7bdb7f97c-f56fp replicaset-controller logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:20 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-7bdb7f97c to 1 deployment-controller logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:21 +0000 UTC Normal Pod agent-as-sidecar-7bdb7f97c-f56fp AddedInterface Add eth0 
[10.129.2.110/23] from ovn-kubernetes logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:21 +0000 UTC Normal Pod agent-as-sidecar-7bdb7f97c-f56fp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:21 +0000 UTC Normal Pod agent-as-sidecar-7bdb7f97c-f56fp.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:21 +0000 UTC Normal Pod agent-as-sidecar-7bdb7f97c-f56fp.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:24 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-trtlp Scheduled Successfully assigned kuttl-test-immune-narwhal/vertx-create-span-sidecar-568b7c9f6f-trtlp to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:24 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-trtlp AddedInterface Add eth0 [10.131.0.67/23] from ovn-kubernetes logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:24 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-trtlp.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:24 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-trtlp.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:24 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-trtlp.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:24 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulCreate Created pod: vertx-create-span-sidecar-568b7c9f6f-trtlp replicaset-controller logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:24 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-568b7c9f6f to 1 deployment-controller logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:26 +0000 UTC Normal Pod vertx-create-span-sidecar-cf76988f8-2plt7 Scheduled Successfully assigned kuttl-test-immune-narwhal/vertx-create-span-sidecar-cf76988f8-2plt7 to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:26 +0000 UTC Normal Pod vertx-create-span-sidecar-cf76988f8-2plt7 AddedInterface Add eth0 [10.128.2.58/23] from ovn-kubernetes logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:26 +0000 UTC Normal Pod vertx-create-span-sidecar-cf76988f8-2plt7.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:26 +0000 UTC Normal Pod vertx-create-span-sidecar-cf76988f8-2plt7.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:26 +0000 UTC Normal Pod 
vertx-create-span-sidecar-cf76988f8-2plt7.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:26 +0000 UTC Normal Pod vertx-create-span-sidecar-cf76988f8-2plt7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:26 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-cf76988f8 SuccessfulCreate Created pod: vertx-create-span-sidecar-cf76988f8-2plt7 replicaset-controller logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:26 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-cf76988f8 to 1 deployment-controller logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:27 +0000 UTC Normal Pod vertx-create-span-sidecar-cf76988f8-2plt7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | 2023-10-02 13:47:27 +0000 UTC Normal Pod vertx-create-span-sidecar-cf76988f8-2plt7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:47:28 | sidecar-skip-webhook | Deleting namespace: kuttl-test-immune-narwhal === CONT kuttl/harness/sidecar-deployment logger.go:42: 13:47:35 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:47:35 | sidecar-deployment | Creating namespace: kuttl-test-moved-muskrat logger.go:42: 13:47:35 | sidecar-deployment/0-install | starting test step 0-install logger.go:42: 13:47:35 | sidecar-deployment/0-install | Jaeger:kuttl-test-moved-muskrat/agent-as-sidecar created logger.go:42: 13:47:41 | sidecar-deployment/0-install | test step completed 0-install logger.go:42: 13:47:41 | sidecar-deployment/1-install | starting test step 1-install logger.go:42: 13:47:41 | sidecar-deployment/1-install | Deployment:kuttl-test-moved-muskrat/vertx-create-span-sidecar created logger.go:42: 13:47:42 | sidecar-deployment/1-install | test step completed 1-install logger.go:42: 13:47:42 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection logger.go:42: 13:47:42 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-moved-muskrat] logger.go:42: 13:47:42 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 13:47:44 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection logger.go:42: 13:47:44 | sidecar-deployment/3-find-service | starting test step 3-find-service logger.go:42: 13:47:44 | sidecar-deployment/3-find-service | Job:kuttl-test-moved-muskrat/00-find-service created logger.go:42: 13:47:56 | sidecar-deployment/3-find-service | test step completed 3-find-service logger.go:42: 13:47:56 | sidecar-deployment/4-other-instance | starting test step 4-other-instance logger.go:42: 13:47:56 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-moved-muskrat/agent-as-sidecar2 created logger.go:42: 13:48:02 | sidecar-deployment/4-other-instance | test step completed 4-other-instance logger.go:42: 13:48:02 | sidecar-deployment/5-delete-first-instance | starting 
test step 5-delete-first-instance logger.go:42: 13:48:03 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 13:48:03 | sidecar-deployment/6-find-service | starting test step 6-find-service logger.go:42: 13:48:03 | sidecar-deployment/6-find-service | Job:kuttl-test-moved-muskrat/01-find-service created logger.go:42: 13:48:24 | sidecar-deployment/6-find-service | test step completed 6-find-service logger.go:42: 13:48:24 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection logger.go:42: 13:48:24 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-moved-muskrat] logger.go:42: 13:48:24 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 13:48:26 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection logger.go:42: 13:48:26 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-moved-muskrat: logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:39 +0000 UTC Normal Pod agent-as-sidecar-67cb6d9b69-d8xdv Scheduled Successfully assigned kuttl-test-moved-muskrat/agent-as-sidecar-67cb6d9b69-d8xdv to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:39 +0000 UTC Normal Pod agent-as-sidecar-67cb6d9b69-d8xdv AddedInterface Add eth0 [10.129.2.111/23] from ovn-kubernetes logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:39 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-67cb6d9b69 SuccessfulCreate Created pod: agent-as-sidecar-67cb6d9b69-d8xdv replicaset-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:39 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-67cb6d9b69 to 1 deployment-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:40 +0000 UTC Normal Pod agent-as-sidecar-67cb6d9b69-d8xdv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:40 +0000 UTC Normal Pod agent-as-sidecar-67cb6d9b69-d8xdv.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:40 +0000 UTC Normal Pod agent-as-sidecar-67cb6d9b69-d8xdv.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:41 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-7lj58 Scheduled Successfully assigned kuttl-test-moved-muskrat/vertx-create-span-sidecar-568b7c9f6f-7lj58 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:41 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-7lj58 AddedInterface Add eth0 [10.131.0.68/23] from ovn-kubernetes logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:41 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-7lj58.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:41 +0000 UTC Normal ReplicaSet.apps 
vertx-create-span-sidecar-568b7c9f6f SuccessfulCreate Created pod: vertx-create-span-sidecar-568b7c9f6f-7lj58 replicaset-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:41 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-568b7c9f6f to 1 deployment-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:42 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-7lj58.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:42 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-7lj58.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:42 +0000 UTC Normal Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6 Scheduled Successfully assigned kuttl-test-moved-muskrat/vertx-create-span-sidecar-59db8cf9f6-2xcp6 to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:42 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-59db8cf9f6 SuccessfulCreate Created pod: vertx-create-span-sidecar-59db8cf9f6-2xcp6 replicaset-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:42 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-59db8cf9f6 to 1 deployment-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6 AddedInterface Add eth0 [10.128.2.59/23] from ovn-kubernetes logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:43 +0000 UTC Normal Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:44 +0000 UTC Normal Pod 00-find-service-pb48q Scheduled Successfully assigned kuttl-test-moved-muskrat/00-find-service-pb48q to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 
13:47:44 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-pb48q job-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:45 +0000 UTC Normal Pod 00-find-service-pb48q AddedInterface Add eth0 [10.131.0.69/23] from ovn-kubernetes logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:45 +0000 UTC Normal Pod 00-find-service-pb48q.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:45 +0000 UTC Normal Pod 00-find-service-pb48q.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:45 +0000 UTC Normal Pod 00-find-service-pb48q.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:50 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-7lj58.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.68:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:50 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-7lj58.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:51 +0000 UTC Warning Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.59:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:51 +0000 UTC Warning Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.59:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:53 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-7lj58.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:53 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-7lj58.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": read tcp 10.131.0.2:48410->10.131.0.68:8080: read: connection reset by peer kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:53 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-7lj58.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": dial tcp 10.131.0.68:8080: connect: connection refused kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:53 +0000 UTC Normal Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:53 +0000 UTC Warning Pod 
vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.59:8080/": read tcp 10.128.2.2:45604->10.128.2.59:8080: read: connection reset by peer kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:53 +0000 UTC Warning Pod vertx-create-span-sidecar-59db8cf9f6-2xcp6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.59:8080/": dial tcp 10.128.2.59:8080: connect: connection refused kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:47:56 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:00 +0000 UTC Normal Pod agent-as-sidecar2-8b695c6b5-ljzk6 Scheduled Successfully assigned kuttl-test-moved-muskrat/agent-as-sidecar2-8b695c6b5-ljzk6 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:00 +0000 UTC Normal Pod agent-as-sidecar2-8b695c6b5-ljzk6 AddedInterface Add eth0 [10.131.0.70/23] from ovn-kubernetes logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:00 +0000 UTC Normal Pod agent-as-sidecar2-8b695c6b5-ljzk6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:00 +0000 UTC Normal Pod agent-as-sidecar2-8b695c6b5-ljzk6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:00 +0000 UTC Normal Pod agent-as-sidecar2-8b695c6b5-ljzk6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:00 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-8b695c6b5 SuccessfulCreate Created pod: agent-as-sidecar2-8b695c6b5-ljzk6 replicaset-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:00 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-8b695c6b5 to 1 deployment-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:03 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-4mltg job-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:03 +0000 UTC Normal Pod agent-as-sidecar-67cb6d9b69-d8xdv.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:04 +0000 UTC Normal Pod 01-find-service-4mltg Scheduled Successfully assigned kuttl-test-moved-muskrat/01-find-service-4mltg to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:04 +0000 UTC Normal Pod 01-find-service-4mltg AddedInterface Add eth0 [10.129.2.112/23] from ovn-kubernetes logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:04 +0000 UTC Normal Pod 01-find-service-4mltg.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-69k515z0/pipeline@sha256:c4a90d7a9a176bd2f38a37140102bbf9c37781112ec10bc94db673c51c813678" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:04 +0000 UTC Normal Pod 01-find-service-4mltg.spec.containers{asserts-container} Created Created container asserts-container 
kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:04 +0000 UTC Normal Pod 01-find-service-4mltg.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:04 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-7lj58.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": read tcp 10.131.0.2:52780->10.131.0.68:8080: read: connection reset by peer kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-568b7c9f6f-7lj58 replicaset-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h Scheduled Successfully assigned kuttl-test-moved-muskrat/vertx-create-span-sidecar-79cbf7fcfd-t6h4h to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h AddedInterface Add eth0 [10.129.2.113/23] from ovn-kubernetes logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-79cbf7fcfd SuccessfulCreate Created pod: vertx-create-span-sidecar-79cbf7fcfd-t6h4h replicaset-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-568b7c9f6f to 0 from 1 deployment-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:06 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-79cbf7fcfd to 1 from 0 deployment-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:14 +0000 UTC Warning Pod 
vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.113:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:14 +0000 UTC Warning Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.113:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:16 +0000 UTC Normal Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:17 +0000 UTC Warning Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.113:8080/": read tcp 10.129.2.2:47886->10.129.2.113:8080: read: connection reset by peer kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:17 +0000 UTC Warning Pod vertx-create-span-sidecar-79cbf7fcfd-t6h4h.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.113:8080/": dial tcp 10.129.2.113:8080: connect: connection refused kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:23 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:24 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-59db8cf9f6 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-59db8cf9f6-2xcp6 replicaset-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:24 +0000 UTC Normal Pod vertx-create-span-sidecar-754b7cd889-8h6cf Scheduled Successfully assigned kuttl-test-moved-muskrat/vertx-create-span-sidecar-754b7cd889-8h6cf to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:24 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-754b7cd889 SuccessfulCreate Created pod: vertx-create-span-sidecar-754b7cd889-8h6cf replicaset-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:24 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-59db8cf9f6 to 0 from 1 deployment-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:24 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-754b7cd889 to 1 from 0 deployment-controller logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:25 +0000 UTC Normal Pod vertx-create-span-sidecar-754b7cd889-8h6cf AddedInterface Add eth0 [10.131.0.71/23] from ovn-kubernetes logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:25 +0000 UTC Normal Pod vertx-create-span-sidecar-754b7cd889-8h6cf.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 13:48:26 | sidecar-deployment | 2023-10-02 13:48:25 +0000 UTC Normal Pod vertx-create-span-sidecar-754b7cd889-8h6cf.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 13:48:26 | 
sidecar-deployment | 2023-10-02 13:48:25 +0000 UTC Normal Pod vertx-create-span-sidecar-754b7cd889-8h6cf.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 13:48:26 | sidecar-deployment | Deleting namespace: kuttl-test-moved-muskrat
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (143.82s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.17s)
        --- PASS: kuttl/harness/sidecar-namespace (61.30s)
        --- PASS: kuttl/harness/sidecar-skip-webhook (18.36s)
        --- PASS: kuttl/harness/sidecar-deployment (57.93s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml
time="2023-10-02T13:48:33Z" level=debug msg="Setting a new name for the test suites"
time="2023-10-02T13:48:33Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-10-02T13:48:33Z" level=debug msg="normalizing test case names"
time="2023-10-02T13:48:33Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts"
time="2023-10-02T13:48:33Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace"
time="2023-10-02T13:48:33Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook"
time="2023-10-02T13:48:33Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment"
+------------------------------+--------+
|             NAME             | RESULT |
+------------------------------+--------+
| sidecar_artifacts            | passed |
| sidecar_sidecar_namespace    | passed |
| sidecar_sidecar_skip_webhook | passed |
| sidecar_sidecar_deployment   | passed |
+------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ '[' 2 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=streaming
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
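[Editor's note] The xtrace above shows how run-e2e-test-suite.sh decides the job's overall exit code: it counts how many suite reports under $ARTIFACT_DIR contain a failure (here 2: elasticsearch and examples) and only fails the run when that count exceeds a threshold (here 3, hence exit 0). A minimal sketch of that gate, reconstructed from the trace; the MAX_FAILING_SUITES name is an assumption, and the real script may differ:

#!/bin/bash
# Hypothetical reconstruction of the artifact gate traced above.
# ARTIFACT_DIR comes from the CI environment; the threshold of 3 is
# inferred from the observed test '[' 2 -gt 3 ']'.
MAX_FAILING_SUITES=3
count=0
for file in "$ARTIFACT_DIR"/*; do
  # A JUnit XML written by junitcli contains 'failure message' only for
  # failed test cases; grep -c prints the number of matching lines.
  if [ "$(grep -c 'failure message' "$file")" -gt 0 ]; then
    count=$((count + 1))
  fi
done
[ "$count" -gt "$MAX_FAILING_SUITES" ] && exit 1
exit 0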
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/streaming.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-streaming make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ SKIP_ES_EXTERNAL=true \ ./tests/e2e/streaming/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 74m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 74m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/streaming/render.sh ++ export SUITE_DIR=./tests/e2e/streaming ++ SUITE_DIR=./tests/e2e/streaming ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
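[Editor's note] The `sort -V` pipeline in the render.sh trace above implements a "version less-or-equal" check: KAFKA_USE_CUSTOM_PODSET ends up true because 0.32.0 is not <= 0.25.0, i.e. this Strimzi release is new enough to use StrimziPodSets. A sketch of the helper the trace implies (the function name matches the trace; the repo's actual implementation may differ):

# version_le A B: succeed when version A <= version B. sort -V orders
# dotted version strings semantically, so A <= B exactly when A sorts first.
version_le() {
  test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
}

# Mirrors the traced logic for KAFKA_VERSION=0.32.0:
if version_le "$KAFKA_VERSION" 0.25.0; then
  KAFKA_USE_CUSTOM_PODSET=false   # old Strimzi releases
else
  KAFKA_USE_CUSTOM_PODSET=true    # newer than 0.25.0: StrimziPodSets
fi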
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml + render_smoke_test simple-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=simple-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + export JAEGER_NAME=simple-streaming + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-tls + '[' 1 -ne 1 ']' + test_name=streaming-with-tls + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-tls' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m' Rendering files for test streaming-with-tls + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple + '[' streaming-simple '!=' _build ']' + cd .. 
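[Editor's note] Each render step above follows the same pattern: export the values a template needs, run gomplate to expand it into a numbered kuttl step file, then unset the variables so they cannot leak into the next test. A self-contained, hypothetical miniature of that pattern; the real smoke-test.yaml.template in the operator repo is more elaborate, and the Job body below is purely illustrative (gomplate can read environment variables with env.Getenv):

# Values consumed by the template, exactly as exported in the trace above.
export JAEGER_NAME=simple-streaming
export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443

# A stand-in template; env.Getenv is gomplate's environment lookup.
cat > /tmp/smoke-test.yaml.template <<'EOF'
apiVersion: batch/v1
kind: Job
metadata:
  name: {{ env.Getenv "JAEGER_NAME" }}-smoke-test
spec:
  template:
    spec:
      restartPolicy: Never
      containers:
      - name: smoke-test
        image: registry.access.redhat.com/ubi9/ubi-minimal
        command:
        - curl
        - -ksf
        - {{ env.Getenv "JAEGER_QUERY_ENDPOINT" }}/api/services
EOF

gomplate -f /tmp/smoke-test.yaml.template -o ./05-smoke-test.yaml
unset JAEGER_NAME JAEGER_QUERY_ENDPOINT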
+ mkdir -p streaming-with-tls + cd streaming-with-tls + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + render_smoke_test tls-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=tls-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + export JAEGER_NAME=tls-streaming + JAEGER_NAME=tls-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. 
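[Editor's note] One quirk worth flagging in render_assert_kafka above: step numbers derived with `expr` lose their zero padding (`expr 00 + 1` prints `1`), so the Kafka-cluster asserts land in ./1-assert.yaml and ./4-assert.yaml next to the padded ./00-assert.yaml, ./02-assert.yaml and ./05-assert.yaml. kuttl parses the leading digits numerically (the ^(\d+)- regexp seen later in this log), so the steps still run in order, but a padded variant would keep the file names uniform, e.g.:

# Hypothetical zero-padded step arithmetic for render_assert_kafka:
test_step=00
kafka_step=$(printf '%02d' "$(expr "$test_step" + 1)")   # "01" instead of "1"
/tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o "./${kafka_step}-assert.yaml"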
+ mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./4-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + render_install_tracegen auto-provisioned 06 + '[' 2 -ne 2 ']' + jaeger=auto-provisioned + step=06 + replicas=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/tracegen.yaml -o ./06-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=1 ./06-install.yaml + sed -i s~simple-prod~auto-provisioned~gi ./06-install.yaml + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-tracegen.yaml.template -o ./06-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd 
tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3595368357 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-simple === PAUSE kuttl/harness/streaming-simple === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === RUN kuttl/harness/streaming-with-tls === PAUSE kuttl/harness/streaming-with-tls === CONT kuttl/harness/artifacts logger.go:42: 13:48:46 | artifacts | Creating namespace: kuttl-test-suitable-hedgehog logger.go:42: 13:48:46 | artifacts | artifacts events from ns kuttl-test-suitable-hedgehog: logger.go:42: 13:48:46 | artifacts | Deleting namespace: kuttl-test-suitable-hedgehog === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-desired-mastodon logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 13:48:52 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 13:48:58 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 13:48:58 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 13:48:58 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 13:49:01 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 13:49:02 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 13:49:19 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 
1-install
logger.go:42: 13:49:19 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install
logger.go:42: 13:49:19 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-desired-mastodon/auto-provisioned created
logger.go:42: 13:49:19 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install
logger.go:42: 13:49:19 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3-
logger.go:42: 13:49:54 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3-
logger.go:42: 13:49:54 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4-
logger.go:42: 13:50:24 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4-
logger.go:42: 13:50:24 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5-
logger.go:42: 13:50:48 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5-
logger.go:42: 13:50:48 | streaming-with-autoprovisioning-autoscale/6-install | starting test step 6-install
logger.go:42: 13:50:48 | streaming-with-autoprovisioning-autoscale/6-install | Deployment:kuttl-test-desired-mastodon/tracegen created
logger.go:42: 13:50:53 | streaming-with-autoprovisioning-autoscale/6-install | test step completed 6-install
logger.go:42: 13:50:53 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7-
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale/7- | test step failed 7-
case.go:364: failed in step 7-
case.go:366: --- Deployment:kuttl-test-desired-mastodon/auto-provisioned-ingester
+++ Deployment:kuttl-test-desired-mastodon/auto-provisioned-ingester
@@ -1,8 +1,320 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
+  labels:
+    app: jaeger
+    app.kubernetes.io/component: ingester
+    app.kubernetes.io/instance: auto-provisioned
+    app.kubernetes.io/managed-by: jaeger-operator
+    app.kubernetes.io/name: auto-provisioned-ingester
+    app.kubernetes.io/part-of: jaeger
+  managedFields:
+  - apiVersion: apps/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:labels:
+          .: {}
+          f:app: {}
+          f:app.kubernetes.io/component: {}
+          f:app.kubernetes.io/instance: {}
+          f:app.kubernetes.io/managed-by: {}
+          f:app.kubernetes.io/name: {}
+          f:app.kubernetes.io/part-of: {}
+        f:ownerReferences:
+          .: {}
+          k:{"uid":"656a3f49-460d-42f4-8720-e22252202ae9"}: {}
+      f:spec:
+        f:progressDeadlineSeconds: {}
+        f:replicas: {}
+        f:revisionHistoryLimit: {}
+        f:selector: {}
+        f:strategy:
+          f:type: {}
+        f:template:
+          f:metadata:
+            f:annotations:
+              .: {}
+              f:linkerd.io/inject: {}
+              f:prometheus.io/port: {}
+              f:prometheus.io/scrape: {}
+              f:sidecar.istio.io/inject: {}
+            f:labels:
+              .: {}
+              f:app: {}
+              f:app.kubernetes.io/component: {}
+              f:app.kubernetes.io/instance: {}
+              f:app.kubernetes.io/managed-by: {}
+              f:app.kubernetes.io/name: {}
+              f:app.kubernetes.io/part-of: {}
+          f:spec:
+            f:containers:
+              k:{"name":"jaeger-ingester"}:
+                .: {}
+                f:args: {}
+                f:env:
+                  .: {}
+                  k:{"name":"SPAN_STORAGE_TYPE"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:livenessProbe:
+                  .: {}
+                  f:failureThreshold: {}
+                  f:httpGet:
+                    .: {}
+                    f:path: {}
+                    f:port: {}
+                    f:scheme: {}
+                  f:initialDelaySeconds: {}
+                  f:periodSeconds: {}
+                  f:successThreshold: {}
+                  f:timeoutSeconds: {}
+                f:name: {}
+                f:ports:
+                  .: {}
+                  k:{"containerPort":14270,"protocol":"TCP"}:
+                    .: {}
+                    f:containerPort: {}
+                    f:name: {}
+                    f:protocol: {}
+                f:readinessProbe:
+                  .: {}
+                  f:failureThreshold: {}
+                  f:httpGet:
+                    .: {}
+                    f:path: {}
+                    f:port: {}
+                    f:scheme: {}
+                  f:initialDelaySeconds: {}
+                  f:periodSeconds: {}
+                  f:successThreshold: {}
+                  f:timeoutSeconds: {}
+                f:resources:
+                  .: {}
+                  f:requests:
+                    .: {}
+                    f:memory: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+                    f:readOnly: {}
+                  k:{"mountPath":"/var/run/secrets/auto-provisioned"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+                  k:{"mountPath":"/var/run/secrets/auto-provisioned-cluster-ca"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:enableServiceLinks: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:serviceAccount: {}
+            f:serviceAccountName: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"auto-provisioned-trusted-ca"}:
+                .: {}
+                f:configMap:
+                  .: {}
+                  f:defaultMode: {}
+                  f:items: {}
+                  f:name: {}
+                f:name: {}
+              k:{"name":"kafkauser-auto-provisioned"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+              k:{"name":"kafkauser-auto-provisioned-cluster-ca"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: Go-http-client
+    operation: Update
+    time: "2023-10-02T13:50:49Z"
+  - apiVersion: apps/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:deployment.kubernetes.io/revision: {}
+      f:status:
+        f:availableReplicas: {}
+        f:conditions:
+          .: {}
+          k:{"type":"Available"}:
+            .: {}
+            f:lastTransitionTime: {}
+            f:lastUpdateTime: {}
+            f:message: {}
+            f:reason: {}
+            f:status: {}
+            f:type: {}
+          k:{"type":"Progressing"}:
+            .: {}
+            f:lastTransitionTime: {}
+            f:lastUpdateTime: {}
+            f:message: {}
+            f:reason: {}
+            f:status: {}
+            f:type: {}
+        f:observedGeneration: {}
+        f:readyReplicas: {}
+        f:replicas: {}
+        f:updatedReplicas: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-10-02T13:50:56Z"
   name: auto-provisioned-ingester
   namespace: kuttl-test-desired-mastodon
+  ownerReferences:
+  - apiVersion: jaegertracing.io/v1
+    controller: true
+    kind: Jaeger
+    name: auto-provisioned
+    uid: 656a3f49-460d-42f4-8720-e22252202ae9
+spec:
+  progressDeadlineSeconds: 600
+  replicas: 1
+  revisionHistoryLimit: 10
+  selector:
+    matchLabels:
+      app: jaeger
+      app.kubernetes.io/component: ingester
+      app.kubernetes.io/instance: auto-provisioned
+      app.kubernetes.io/managed-by: jaeger-operator
+      app.kubernetes.io/name: auto-provisioned-ingester
+      app.kubernetes.io/part-of: jaeger
+  strategy:
+    type: Recreate
+  template:
+    metadata:
+      annotations:
+        linkerd.io/inject: disabled
+        prometheus.io/port: "14270"
+        prometheus.io/scrape: "true"
+        sidecar.istio.io/inject: "false"
+      creationTimestamp: null
+      labels:
+        app: jaeger
+        app.kubernetes.io/component: ingester
+        app.kubernetes.io/instance: auto-provisioned
+        app.kubernetes.io/managed-by: jaeger-operator
+        app.kubernetes.io/name: auto-provisioned-ingester
+        app.kubernetes.io/part-of: jaeger
+    spec:
+      containers:
+      - args:
+        - --es.server-urls=http://elasticsearch:9200
+        - --kafka.consumer.authentication=tls
+        - --kafka.consumer.brokers=auto-provisioned-kafka-bootstrap.kuttl-test-desired-mastodon.svc.cluster.local:9093
+        - --kafka.consumer.tls.ca=/var/run/secrets/auto-provisioned-cluster-ca/ca.crt
+        - --kafka.consumer.tls.cert=/var/run/secrets/auto-provisioned/user.crt
+        - --kafka.consumer.tls.enabled=true
+        - --kafka.consumer.tls.key=/var/run/secrets/auto-provisioned/user.key
+        env:
+        - name: SPAN_STORAGE_TYPE
+          value: elasticsearch
+        image: registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07
+        imagePullPolicy: IfNotPresent
+        livenessProbe:
+          failureThreshold: 5
+          httpGet:
+            path: /
+            port: 14270
+            scheme: HTTP
+          initialDelaySeconds: 5
+          periodSeconds: 15
+          successThreshold: 1
+          timeoutSeconds: 1
+        name: jaeger-ingester
+        ports:
+        - containerPort: 14270
+          name: admin-http
+          protocol: TCP
+        readinessProbe:
+          failureThreshold: 3
+          httpGet:
+            path: /
+            port: 14270
+            scheme: HTTP
+          initialDelaySeconds: 1
+          periodSeconds: 10
+          successThreshold: 1
+          timeoutSeconds: 1
+        resources:
+          requests:
+            memory: 500m
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/auto-provisioned
+          name: kafkauser-auto-provisioned
+        - mountPath: /var/run/secrets/auto-provisioned-cluster-ca
+          name: kafkauser-auto-provisioned-cluster-ca
+        - mountPath: /etc/pki/ca-trust/extracted/pem
+          name: auto-provisioned-trusted-ca
+          readOnly: true
+      dnsPolicy: ClusterFirst
+      enableServiceLinks: false
+      restartPolicy: Always
+      schedulerName: default-scheduler
+      securityContext: {}
+      serviceAccount: auto-provisioned
+      serviceAccountName: auto-provisioned
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: kafkauser-auto-provisioned
+        secret:
+          defaultMode: 420
+          secretName: auto-provisioned
+      - name: kafkauser-auto-provisioned-cluster-ca
+        secret:
+          defaultMode: 420
+          secretName: auto-provisioned-cluster-ca-cert
+      - configMap:
+          defaultMode: 420
+          items:
+          - key: ca-bundle.crt
+            path: tls-ca-bundle.pem
+          name: auto-provisioned-trusted-ca
+        name: auto-provisioned-trusted-ca
 status:
-  readyReplicas: 2
+  availableReplicas: 1
+  conditions:
+  - lastTransitionTime: "2023-10-02T13:50:56Z"
+    lastUpdateTime: "2023-10-02T13:50:56Z"
+    message: Deployment has minimum availability.
+    reason: MinimumReplicasAvailable
+    status: "True"
+    type: Available
+  - lastTransitionTime: "2023-10-02T13:50:49Z"
+    lastUpdateTime: "2023-10-02T13:50:56Z"
+    message: ReplicaSet "auto-provisioned-ingester-76f84f684" has successfully progressed.
+    reason: NewReplicaSetAvailable
+    status: "True"
+    type: Progressing
+  observedGeneration: 1
+  readyReplicas: 1
+  replicas: 1
+  updatedReplicas: 1
case.go:366: resource Deployment:kuttl-test-desired-mastodon/auto-provisioned-ingester: .status.readyReplicas: value mismatch, expected: 2 != actual: 1
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-desired-mastodon:
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:48:58 +0000 UTC Normal Pod elasticsearch-0 Scheduled Successfully assigned kuttl-test-desired-mastodon/elasticsearch-0 to ip-10-0-44-255.ec2.internal default-scheduler
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:48:58 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:48:59 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.131.0.72/23] from ovn-kubernetes
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:48:59 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:07 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 8.217621902s (8.217632803s including waiting) kubelet
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:07 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:07 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:14 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.131.0.72:9200/": dial tcp 10.131.0.72:9200: connect: connection refused kubelet
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:23 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:24 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:24 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-desired-mastodon/data-auto-provisioned-zookeeper-0"
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:24 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller
logger.go:42: 13:57:53 |
streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:27 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-70c5a506-06b1-4a4d-bdc8-f3032bb8752b logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:28 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Scheduled Successfully assigned kuttl-test-desired-mastodon/auto-provisioned-zookeeper-0 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:30 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-70c5a506-06b1-4a4d-bdc8-f3032bb8752b" attachdetach-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:33 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.131.0.73/23] from ovn-kubernetes logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:33 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:34 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:34 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:55 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:55 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:55 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:55 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-desired-mastodon/data-0-auto-provisioned-kafka-0" logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:59 +0000 UTC Normal Pod auto-provisioned-kafka-0 Scheduled Successfully assigned kuttl-test-desired-mastodon/auto-provisioned-kafka-0 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:49:59 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-0d2310ed-a5bc-4641-832f-7e495518e168 logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:02 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for 
volume "pvc-0d2310ed-a5bc-4641-832f-7e495518e168" attachdetach-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:04 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.131.0.74/23] from ovn-kubernetes logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:04 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:04 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:04 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:26 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9 Scheduled Successfully assigned kuttl-test-desired-mastodon/auto-provisioned-entity-operator-86d7948795-w26p9 to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:26 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-86d7948795 SuccessfulCreate Created pod: auto-provisioned-entity-operator-86d7948795-w26p9 replicaset-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:26 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-86d7948795 to 1 deployment-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9 AddedInterface Add eth0 [10.131.0.75/23] from ovn-kubernetes logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9.spec.containers{user-operator} Created Created container user-operator kubelet 
logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:27 +0000 UTC Normal Pod auto-provisioned-entity-operator-86d7948795-w26p9.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:48 +0000 UTC Normal Pod tracegen-5b7fd5c86-q4rpd Scheduled Successfully assigned kuttl-test-desired-mastodon/tracegen-5b7fd5c86-q4rpd to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:48 +0000 UTC Warning Pod tracegen-5b7fd5c86-q4rpd FailedMount MountVolume.SetUp failed for volume "auto-provisioned-trusted-ca" : configmap "auto-provisioned-trusted-ca" not found kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:48 +0000 UTC Warning Pod tracegen-5b7fd5c86-q4rpd FailedMount MountVolume.SetUp failed for volume "auto-provisioned-service-ca" : configmap "auto-provisioned-service-ca" not found kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:48 +0000 UTC Normal ReplicaSet.apps tracegen-5b7fd5c86 SuccessfulCreate Created pod: tracegen-5b7fd5c86-q4rpd replicaset-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:48 +0000 UTC Normal Deployment.apps tracegen ScalingReplicaSet Scaled up replica set tracegen-5b7fd5c86 to 1 deployment-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:49 +0000 UTC Normal Pod auto-provisioned-collector-56cb9f44f4-9p542 Scheduled Successfully assigned kuttl-test-desired-mastodon/auto-provisioned-collector-56cb9f44f4-9p542 to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:49 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-56cb9f44f4 SuccessfulCreate Created pod: auto-provisioned-collector-56cb9f44f4-9p542 replicaset-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:49 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-56cb9f44f4 to 1 deployment-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:49 +0000 UTC Normal Pod auto-provisioned-ingester-76f84f684-gfqd8 Scheduled Successfully assigned kuttl-test-desired-mastodon/auto-provisioned-ingester-76f84f684-gfqd8 to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:49 +0000 UTC Normal 
ReplicaSet.apps auto-provisioned-ingester-76f84f684 SuccessfulCreate Created pod: auto-provisioned-ingester-76f84f684-gfqd8 replicaset-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:49 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-76f84f684 to 1 deployment-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:49 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn Scheduled Successfully assigned kuttl-test-desired-mastodon/auto-provisioned-query-5dbc5b65db-pldjn to ip-10-0-3-54.ec2.internal default-scheduler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:49 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-5dbc5b65db SuccessfulCreate Created pod: auto-provisioned-query-5dbc5b65db-pldjn replicaset-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:49 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-5dbc5b65db to 1 deployment-controller logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-collector-56cb9f44f4-9p542 AddedInterface Add eth0 [10.128.2.60/23] from ovn-kubernetes logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-collector-56cb9f44f4-9p542.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-collector-56cb9f44f4-9p542.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-collector-56cb9f44f4-9p542.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-ingester-76f84f684-gfqd8 AddedInterface Add eth0 [10.129.2.115/23] from ovn-kubernetes logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-ingester-76f84f684-gfqd8.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn AddedInterface Add eth0 [10.128.2.61/23] from ovn-kubernetes logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn.spec.containers{jaeger-query} 
Created Created container jaeger-query kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod auto-provisioned-query-5dbc5b65db-pldjn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod tracegen-5b7fd5c86-q4rpd AddedInterface Add eth0 [10.129.2.114/23] from ovn-kubernetes logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:50 +0000 UTC Normal Pod tracegen-5b7fd5c86-q4rpd.spec.containers{tracegen} Pulling Pulling image "jaegertracing/jaeger-tracegen:1.49.0" kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:51 +0000 UTC Normal Pod tracegen-5b7fd5c86-q4rpd.spec.containers{tracegen} Pulled Successfully pulled image "jaegertracing/jaeger-tracegen:1.49.0" in 1.153667528s (1.153676278s including waiting) kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:51 +0000 UTC Normal Pod tracegen-5b7fd5c86-q4rpd.spec.containers{tracegen} Created Created container tracegen kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:51 +0000 UTC Normal Pod tracegen-5b7fd5c86-q4rpd.spec.containers{tracegen} Started Started container tracegen kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:51 +0000 UTC Normal Pod tracegen-5b7fd5c86-q4rpd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:51 +0000 UTC Normal Pod tracegen-5b7fd5c86-q4rpd.spec.containers{jaeger-agent} Created Created container 
jaeger-agent kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:51 +0000 UTC Normal Pod tracegen-5b7fd5c86-q4rpd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:55 +0000 UTC Normal Pod auto-provisioned-ingester-76f84f684-gfqd8.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" in 5.437141761s (5.437151841s including waiting) kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:55 +0000 UTC Normal Pod auto-provisioned-ingester-76f84f684-gfqd8.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:50:55 +0000 UTC Normal Pod auto-provisioned-ingester-76f84f684-gfqd8.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:51:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:51:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:51:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:51:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:51:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:51:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:51:42 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu 
utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:51:42 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:51:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod auto-provisioned-collector-56cb9f44f4-9p542 horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:52:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:52:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (1 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:56:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod auto-provisioned-collector-56cb9f44f4-9p542 horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | 2023-10-02 13:56:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-ingester of Pod auto-provisioned-ingester-76f84f684-gfqd8 horizontal-pod-autoscaler logger.go:42: 13:57:53 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-desired-mastodon === CONT kuttl/harness/streaming-with-tls logger.go:42: 13:58:12 | streaming-with-tls | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:58:12 | streaming-with-tls | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:58:12 | streaming-with-tls | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:58:12 | streaming-with-tls | Creating namespace: kuttl-test-able-oryx logger.go:42: 13:58:12 | streaming-with-tls/0-install | starting test step 0-install logger.go:42: 13:58:12 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 13:58:12 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 13:58:13 | streaming-with-tls/0-install | >>>> Skiping kafka-operator undeploy logger.go:42: 13:58:13 | streaming-with-tls/0-install | kubectl delete --namespace kuttl-test-able-oryx -f 
tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 13:58:13 | streaming-with-tls/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist logger.go:42: 13:58:13 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 13:58:13 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 13:58:13 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 13:58:13 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-able-oryx logger.go:42: 13:58:13 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-able-oryx 2>&1 | grep -v "already exists" || true logger.go:42: 13:58:13 | streaming-with-tls/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 13:58:13 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-able-oryx logger.go:42: 13:58:13 | streaming-with-tls/0-install | mkdir -p tests/_build/ logger.go:42: 13:58:13 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-able-oryx 2>&1 | grep -v "already exists" || true logger.go:42: 13:58:13 | streaming-with-tls/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 13:58:13 | streaming-with-tls/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 13:58:13 | streaming-with-tls/0-install | Dload Upload Total Spent Left Speed logger.go:42: 13:58:13 | streaming-with-tls/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 7339 0 --:--:-- --:--:-- --:--:-- 7393 logger.go:42: 13:58:13 | streaming-with-tls/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 13:58:13 | streaming-with-tls/0-install | kubectl -n kuttl-test-able-oryx apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 13:58:13 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 13:58:13 | streaming-with-tls/0-install | kubectl -n kuttl-test-able-oryx apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 13:58:14 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 13:58:14 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 14:05:14 | streaming-with-tls/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 14:05:14 | streaming-with-tls | streaming-with-tls events from ns kuttl-test-able-oryx: logger.go:42: 14:05:14 | streaming-with-tls | Deleting namespace: kuttl-test-able-oryx === CONT kuttl/harness/streaming-simple logger.go:42: 14:05:20 | streaming-simple | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:05:20 | streaming-simple | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:05:20 | streaming-simple | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:05:20 | streaming-simple | Creating namespace: 
kuttl-test-dear-garfish logger.go:42: 14:05:20 | streaming-simple/0-install | starting test step 0-install logger.go:42: 14:05:20 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 14:05:20 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 14:05:20 | streaming-simple/0-install | >>>> Skiping kafka-operator undeploy logger.go:42: 14:05:20 | streaming-simple/0-install | kubectl delete --namespace kuttl-test-dear-garfish -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 14:05:20 | streaming-simple/0-install | Error from server (NotFound): error when deleting "tests/_build/kafka-example.yaml": kafkas.kafka.strimzi.io "my-cluster" not found logger.go:42: 14:05:20 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 14:05:20 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 14:05:20 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 14:05:20 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-dear-garfish logger.go:42: 14:05:20 | streaming-simple/0-install | kubectl create namespace kuttl-test-dear-garfish 2>&1 | grep -v "already exists" || true logger.go:42: 14:05:21 | streaming-simple/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 14:05:21 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-dear-garfish logger.go:42: 14:05:21 | streaming-simple/0-install | mkdir -p tests/_build/ logger.go:42: 14:05:21 | streaming-simple/0-install | kubectl create namespace kuttl-test-dear-garfish 2>&1 | grep -v "already exists" || true logger.go:42: 14:05:21 | streaming-simple/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 14:05:21 | streaming-simple/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 14:05:21 | streaming-simple/0-install | Dload Upload Total Spent Left Speed logger.go:42: 14:05:21 | streaming-simple/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 6319 0 --:--:-- --:--:-- --:--:-- 6360 logger.go:42: 14:05:21 | streaming-simple/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 14:05:21 | streaming-simple/0-install | kubectl -n kuttl-test-dear-garfish apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 14:05:21 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 14:05:21 | streaming-simple/0-install | kubectl -n kuttl-test-dear-garfish apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 14:05:21 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 14:05:21 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 14:12:22 | streaming-simple/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 14:12:22 | streaming-simple | streaming-simple events from ns kuttl-test-dear-garfish: 
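
Both streaming failures above share the same signature: the Kafka CR applies cleanly, then kuttl times out asserting a strimzipodsets.core.strimzi.io resource named "my-cluster-zookeeper" that never appears. StrimziPodSet lives in the core.strimzi.io API group, so the first thing to check is whether the resource is missing because the CRD is absent, the operator installed via OperatorHub is a different version than the 0.32.0 the test assumes, or the operator simply never reconciled the namespace. A diagnostic sketch (the namespace placeholder is illustrative):

  kubectl api-resources --api-group=core.strimzi.io
  kubectl get strimzipodsets.core.strimzi.io -n <kafka-namespace>
  kubectl get kafka my-cluster -n <kafka-namespace> -o jsonpath='{.status.conditions}'
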
logger.go:42: 14:12:22 | streaming-simple | Deleting namespace: kuttl-test-dear-garfish
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1422.31s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.06s)
        --- FAIL: kuttl/harness/streaming-with-autoprovisioning-autoscale (560.63s)
        --- FAIL: kuttl/harness/streaming-with-tls (427.86s)
        --- FAIL: kuttl/harness/streaming-simple (427.72s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml
time="2023-10-02T14:12:29Z" level=debug msg="Setting a new name for the test suites"
time="2023-10-02T14:12:29Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-10-02T14:12:29Z" level=debug msg="normalizing test case names"
time="2023-10-02T14:12:29Z" level=debug msg="streaming/artifacts -> streaming_artifacts"
time="2023-10-02T14:12:29Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale"
time="2023-10-02T14:12:29Z" level=debug msg="streaming/streaming-with-tls -> streaming_streaming_with_tls"
time="2023-10-02T14:12:29Z" level=debug msg="streaming/streaming-simple -> streaming_streaming_simple"
+-----------------------------------------------------+--------+
|                        NAME                         | RESULT |
+-----------------------------------------------------+--------+
| streaming_artifacts                                 | passed |
| streaming_streaming_with_autoprovisioning_autoscale | failed |
| streaming_streaming_with_tls                        | failed |
| streaming_streaming_simple                          | failed |
+-----------------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 3 -gt 0 ']'
+ count=3
+ '[' 3 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=ui
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/ui.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-ui make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 98m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 98m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/ui/render.sh ++ export SUITE_DIR=./tests/e2e/ui ++ SUITE_DIR=./tests/e2e/ui ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/ui ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test allinone + '[' 1 -ne 1 ']' + test_name=allinone + echo =========================================================================== =========================================================================== + info 'Rendering files for test allinone' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test allinone\e[0m' Rendering files for test allinone + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build + '[' _build '!=' _build ']' + mkdir -p allinone + cd allinone + export GET_URL_COMMAND + export URL + export JAEGER_NAME=all-in-one-ui + JAEGER_NAME=all-in-one-ui + '[' true = true ']' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml + start_test production + '[' 1 -ne 1 ']' + test_name=production + echo =========================================================================== =========================================================================== + info 'Rendering files for test production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test production\e[0m' Rendering files for test production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone + '[' allinone '!=' _build ']' + cd .. 
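
Every file rendered in this suite comes from a gomplate template whose parameters arrive purely through exported environment variables (JAEGER_NAME, GET_URL_COMMAND, EXPECTED_CODE, ASSERT_PRESENT, TRACKING_ID, ...). A minimal sketch of the same mechanism with a hypothetical one-line template; gomplate's -f/-o flags and .Env lookup are real, the file name and value are illustrative:

  printf 'url: https://{{ .Env.JAEGER_NAME }}.example.com/search\n' > demo.yaml.template
  export JAEGER_NAME=all-in-one-ui
  /tmp/jaeger-tests/bin/gomplate -f demo.yaml.template -o ./demo.yaml
  # demo.yaml now contains: url: https://all-in-one-ui.example.com/search
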
+ mkdir -p production + cd production + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + [[ true = true ]] + [[ true = true ]] + render_install_jaeger production-ui production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + '[' true = true ']' + INSECURE=true + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml + INSECURE=true + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml + ASSERT_PRESENT=false + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running ui E2E tests' Running ui E2E tests + cd tests/e2e/ui/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3595368357 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 3 tests === RUN kuttl/harness === RUN kuttl/harness/allinone === PAUSE kuttl/harness/allinone === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/production === PAUSE kuttl/harness/production === CONT kuttl/harness/allinone logger.go:42: 14:12:36 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:12:36 | allinone | Creating namespace: kuttl-test-hopeful-quetzal logger.go:42: 14:12:36 | allinone/0-install | starting test step 0-install logger.go:42: 14:12:36 | allinone/0-install | Jaeger:kuttl-test-hopeful-quetzal/all-in-one-ui created logger.go:42: 14:12:40 | allinone/0-install | test step completed 0-install logger.go:42: 14:12:40 | allinone/1-curl | starting test step 1-curl logger.go:42: 14:12:40 | allinone/1-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 14:12:40 | allinone/1-curl | Checking the Ingress host value was populated logger.go:42: 14:12:40 | allinone/1-curl | Try number 0 logger.go:42: 14:12:40 | allinone/1-curl | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 14:12:40 | allinone/1-curl | template was: logger.go:42: 14:12:40 | allinone/1-curl | {.items[0].status.ingress[0].host} logger.go:42: 14:12:40 | allinone/1-curl | object given to jsonpath engine was: logger.go:42: 14:12:40 | allinone/1-curl | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 14:12:40 | allinone/1-curl | logger.go:42: 14:12:40 | allinone/1-curl | logger.go:42: 14:12:50 | allinone/1-curl | Try number 1 logger.go:42: 14:12:50 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-hopeful-quetzal.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 14:12:50 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui] logger.go:42: 14:12:50 | allinone/1-curl | Checking an expected HTTP response logger.go:42: 14:12:50 | allinone/1-curl | Running in OpenShift logger.go:42: 14:12:50 | allinone/1-curl | User not provided. Getting the token... logger.go:42: 14:12:51 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 14:12:57 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-hopeful-quetzal.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:12:57 | allinone/1-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 14:12:57 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-hopeful-quetzal.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:12:57 | allinone/1-curl | HTTP response is 503. 200 expected. 
Waiting 10 s logger.go:42: 14:13:07 | allinone/1-curl | Try number 3/30 the https://all-in-one-ui-kuttl-test-hopeful-quetzal.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:13:08 | allinone/1-curl | curl response asserted properly logger.go:42: 14:13:08 | allinone/1-curl | test step completed 1-curl logger.go:42: 14:13:08 | allinone/2-delete | starting test step 2-delete logger.go:42: 14:13:08 | allinone/2-delete | Jaeger:kuttl-test-hopeful-quetzal/all-in-one-ui created logger.go:42: 14:13:08 | allinone/2-delete | test step completed 2-delete logger.go:42: 14:13:08 | allinone/3-install | starting test step 3-install logger.go:42: 14:13:08 | allinone/3-install | Jaeger:kuttl-test-hopeful-quetzal/all-in-one-ui updated logger.go:42: 14:13:08 | allinone/3-install | test step completed 3-install logger.go:42: 14:13:08 | allinone/4-test-ui-config | starting test step 4-test-ui-config logger.go:42: 14:13:08 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh] logger.go:42: 14:13:08 | allinone/4-test-ui-config | Checking the Ingress host value was populated logger.go:42: 14:13:08 | allinone/4-test-ui-config | Try number 0 logger.go:42: 14:13:08 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 14:13:08 | allinone/4-test-ui-config | template was: logger.go:42: 14:13:08 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host} logger.go:42: 14:13:08 | allinone/4-test-ui-config | object given to jsonpath engine was: logger.go:42: 14:13:08 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 14:13:08 | allinone/4-test-ui-config | logger.go:42: 14:13:08 | allinone/4-test-ui-config | logger.go:42: 14:13:18 | allinone/4-test-ui-config | Try number 1 logger.go:42: 14:13:18 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-hopeful-quetzal.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 14:13:18 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 14:13:19 | allinone/4-test-ui-config | time="2023-10-02T14:13:19Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-hopeful-quetzal.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search..." logger.go:42: 14:13:19 | allinone/4-test-ui-config | time="2023-10-02T14:13:19Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 14:13:19 | allinone/4-test-ui-config | time="2023-10-02T14:13:19Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-hopeful-quetzal.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search" logger.go:42: 14:13:19 | allinone/4-test-ui-config | time="2023-10-02T14:13:19Z" level=info msg="Doing request number 0" logger.go:42: 14:13:19 | allinone/4-test-ui-config | time="2023-10-02T14:13:19Z" level=info msg="Content found and asserted!" logger.go:42: 14:13:19 | allinone/4-test-ui-config | time="2023-10-02T14:13:19Z" level=info msg="Success!" 
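
The "array index out of bounds" error on Try number 0 in both steps above is expected noise rather than a failure: the OpenShift Route for the Jaeger instance has not been created yet when the step starts, so the jsonpath expression {.items[0].status.ingress[0].host} has nothing to index, and ensure-ingress-host.sh retries until the host field is populated. A minimal sketch of that wait loop (10 s between tries per the log; variable names are illustrative):

  HOST=""
  until [ -n "$HOST" ]; do
    HOST=$(kubectl get routes -n "$NAMESPACE" \
      -o jsonpath='{.items[0].status.ingress[0].host}' 2>/dev/null) || true
    [ -n "$HOST" ] || sleep 10
  done
  echo "Hostname is $HOST"
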
logger.go:42: 14:13:19 | allinone/4-test-ui-config | test step completed 4-test-ui-config logger.go:42: 14:13:19 | allinone | allinone events from ns kuttl-test-hopeful-quetzal: logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:39 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg Scheduled Successfully assigned kuttl-test-hopeful-quetzal/all-in-one-ui-7c8669cd4c-c4mxg to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:39 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-7c8669cd4c SuccessfulCreate Created pod: all-in-one-ui-7c8669cd4c-c4mxg replicaset-controller logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:39 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-7c8669cd4c to 1 deployment-controller logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:40 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg AddedInterface Add eth0 [10.131.0.77/23] from ovn-kubernetes logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:40 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:40 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:40 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:40 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:40 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:40 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:52 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:52 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-7c8669cd4c SuccessfulDelete Deleted pod: all-in-one-ui-7c8669cd4c-c4mxg replicaset-controller logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:52 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-7c8669cd4c to 0 from 1 deployment-controller logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:53 +0000 UTC Normal Pod all-in-one-ui-7c8669cd4c-c4mxg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:54 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf Scheduled Successfully assigned kuttl-test-hopeful-quetzal/all-in-one-ui-69cf9bc998-9qwcf to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:54 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf AddedInterface Add eth0 [10.131.0.78/23] from ovn-kubernetes logger.go:42: 14:13:19 | allinone | 2023-10-02 
14:12:54 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:54 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:54 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:54 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:54 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:54 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:54 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-69cf9bc998 SuccessfulCreate Created pod: all-in-one-ui-69cf9bc998-9qwcf replicaset-controller logger.go:42: 14:13:19 | allinone | 2023-10-02 14:12:54 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-69cf9bc998 to 1 deployment-controller logger.go:42: 14:13:19 | allinone | 2023-10-02 14:13:08 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:13:08 +0000 UTC Normal Pod all-in-one-ui-69cf9bc998-9qwcf.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:13:14 +0000 UTC Normal Pod all-in-one-ui-64bc7d7995-gtp8z Scheduled Successfully assigned kuttl-test-hopeful-quetzal/all-in-one-ui-64bc7d7995-gtp8z to ip-10-0-123-159.ec2.internal default-scheduler logger.go:42: 14:13:19 | allinone | 2023-10-02 14:13:14 +0000 UTC Normal Pod all-in-one-ui-64bc7d7995-gtp8z AddedInterface Add eth0 [10.129.2.116/23] from ovn-kubernetes logger.go:42: 14:13:19 | allinone | 2023-10-02 14:13:14 +0000 UTC Normal Pod all-in-one-ui-64bc7d7995-gtp8z.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:13:14 +0000 UTC Normal Pod all-in-one-ui-64bc7d7995-gtp8z.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:13:14 +0000 UTC Normal Pod all-in-one-ui-64bc7d7995-gtp8z.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:13:19 | allinone | 2023-10-02 14:13:14 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-64bc7d7995 SuccessfulCreate Created pod: all-in-one-ui-64bc7d7995-gtp8z replicaset-controller logger.go:42: 14:13:19 | allinone | 2023-10-02 14:13:14 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-64bc7d7995 to 1 deployment-controller logger.go:42: 14:13:19 | allinone | 
Deleting namespace: kuttl-test-hopeful-quetzal === CONT kuttl/harness/production logger.go:42: 14:13:25 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:13:25 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:13:25 | production | Creating namespace: kuttl-test-fond-sunbird logger.go:42: 14:13:25 | production/1-install | starting test step 1-install logger.go:42: 14:13:25 | production/1-install | Jaeger:kuttl-test-fond-sunbird/production-ui created logger.go:42: 14:14:02 | production/1-install | test step completed 1-install logger.go:42: 14:14:02 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access logger.go:42: 14:14:02 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh] logger.go:42: 14:14:02 | production/2-check-forbbiden-access | Checking the Ingress host value was populated logger.go:42: 14:14:02 | production/2-check-forbbiden-access | Try number 0 logger.go:42: 14:14:02 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 14:14:02 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui] logger.go:42: 14:14:02 | production/2-check-forbbiden-access | Checking an expected HTTP response logger.go:42: 14:14:02 | production/2-check-forbbiden-access | Running in OpenShift logger.go:42: 14:14:02 | production/2-check-forbbiden-access | Not using any secret logger.go:42: 14:14:02 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:14:02 | production/2-check-forbbiden-access | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 14:14:02 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:14:02 | production/2-check-forbbiden-access | HTTP response is 503. 403 expected. 
Waiting 10 s logger.go:42: 14:14:12 | production/2-check-forbbiden-access | Try number 3/30 the https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:14:12 | production/2-check-forbbiden-access | curl response asserted properly logger.go:42: 14:14:12 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access logger.go:42: 14:14:12 | production/3-curl | starting test step 3-curl logger.go:42: 14:14:12 | production/3-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 14:14:12 | production/3-curl | Checking the Ingress host value was populated logger.go:42: 14:14:12 | production/3-curl | Try number 0 logger.go:42: 14:14:12 | production/3-curl | Hostname is production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 14:14:12 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 14:14:12 | production/3-curl | Checking an expected HTTP response logger.go:42: 14:14:12 | production/3-curl | Running in OpenShift logger.go:42: 14:14:12 | production/3-curl | User not provided. Getting the token... logger.go:42: 14:14:13 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 14:14:20 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:14:20 | production/3-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 14:14:20 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:14:20 | production/3-curl | HTTP response is 503. 200 expected. 
Waiting 10 s logger.go:42: 14:14:30 | production/3-curl | Try number 3/30 the https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:14:30 | production/3-curl | curl response asserted properly logger.go:42: 14:14:30 | production/3-curl | test step completed 3-curl logger.go:42: 14:14:30 | production/4-install | starting test step 4-install logger.go:42: 14:14:30 | production/4-install | Jaeger:kuttl-test-fond-sunbird/production-ui updated logger.go:42: 14:14:30 | production/4-install | test step completed 4-install logger.go:42: 14:14:30 | production/5-check-disabled-security | starting test step 5-check-disabled-security logger.go:42: 14:14:30 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh] logger.go:42: 14:14:30 | production/5-check-disabled-security | Checking the Ingress host value was populated logger.go:42: 14:14:30 | production/5-check-disabled-security | Try number 0 logger.go:42: 14:14:30 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 14:14:30 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 14:14:30 | production/5-check-disabled-security | Checking an expected HTTP response logger.go:42: 14:14:30 | production/5-check-disabled-security | Running in OpenShift logger.go:42: 14:14:30 | production/5-check-disabled-security | Not using any secret logger.go:42: 14:14:30 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:14:30 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 14:14:30 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:14:30 | production/5-check-disabled-security | HTTP response is 403. 200 expected. 
Waiting 10 s logger.go:42: 14:14:40 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 14:14:40 | production/5-check-disabled-security | curl response asserted properly logger.go:42: 14:14:40 | production/5-check-disabled-security | test step completed 5-check-disabled-security logger.go:42: 14:14:40 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID logger.go:42: 14:14:40 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 14:14:40 | production/6-check-NO-gaID | Checking the Ingress host value was populated logger.go:42: 14:14:40 | production/6-check-NO-gaID | Try number 0 logger.go:42: 14:14:40 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 14:14:40 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 14:14:40 | production/6-check-NO-gaID | time="2023-10-02T14:14:40Z" level=info msg="Querying https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search..." logger.go:42: 14:14:40 | production/6-check-NO-gaID | time="2023-10-02T14:14:40Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 14:14:40 | production/6-check-NO-gaID | time="2023-10-02T14:14:40Z" level=info msg="Polling to https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search" logger.go:42: 14:14:40 | production/6-check-NO-gaID | time="2023-10-02T14:14:40Z" level=info msg="Doing request number 0" logger.go:42: 14:14:40 | production/6-check-NO-gaID | time="2023-10-02T14:14:40Z" level=info msg="Content not found and asserted it was not found!" logger.go:42: 14:14:40 | production/6-check-NO-gaID | time="2023-10-02T14:14:40Z" level=info msg="Success!" 
logger.go:42: 14:14:40 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID logger.go:42: 14:14:40 | production/7-add-tracking-id | starting test step 7-add-tracking-id logger.go:42: 14:14:40 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE] logger.go:42: 14:14:41 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured logger.go:42: 14:14:41 | production/7-add-tracking-id | test step completed 7-add-tracking-id logger.go:42: 14:14:41 | production/8-check-gaID | starting test step 8-check-gaID logger.go:42: 14:14:41 | production/8-check-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 14:14:41 | production/8-check-gaID | Checking the Ingress host value was populated logger.go:42: 14:14:41 | production/8-check-gaID | Try number 0 logger.go:42: 14:14:41 | production/8-check-gaID | Hostname is production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 14:14:41 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 14:14:41 | production/8-check-gaID | time="2023-10-02T14:14:41Z" level=info msg="Querying https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search..." logger.go:42: 14:14:41 | production/8-check-gaID | time="2023-10-02T14:14:41Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 14:14:41 | production/8-check-gaID | time="2023-10-02T14:14:41Z" level=info msg="Polling to https://production-ui-kuttl-test-fond-sunbird.apps.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com/search" logger.go:42: 14:14:41 | production/8-check-gaID | time="2023-10-02T14:14:41Z" level=info msg="Doing request number 0" logger.go:42: 14:14:41 | production/8-check-gaID | time="2023-10-02T14:14:41Z" level=warning msg="Found: false . Assert: true" logger.go:42: 14:14:41 | production/8-check-gaID | time="2023-10-02T14:14:41Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 14:14:41 | production/8-check-gaID | time="2023-10-02T14:14:41Z" level=info msg="Doing request number 1" logger.go:42: 14:14:41 | production/8-check-gaID | time="2023-10-02T14:14:41Z" level=warning msg="Found: false . Assert: true" logger.go:42: 14:14:41 | production/8-check-gaID | time="2023-10-02T14:14:41Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 14:14:49 | production/8-check-gaID | time="2023-10-02T14:14:49Z" level=info msg="Doing request number 2" logger.go:42: 14:14:49 | production/8-check-gaID | time="2023-10-02T14:14:49Z" level=info msg="Content found and asserted!" logger.go:42: 14:14:49 | production/8-check-gaID | time="2023-10-02T14:14:49Z" level=info msg="Success!" 
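
Steps 6 through 8 together exercise UI-config propagation: step 6 asserts MyTrackingId is absent, step 7 patches the Jaeger CR with add-tracking-id.yaml, and step 8 polls until the redeployed query pod serves the new config; requests 0 and 1 above fail precisely because the query rollout visible in the events below is still in flight. The real assertion is the Go program cmd-utils/uiconfig/main.go; a rough curl equivalent of the ASSERT_PRESENT=true case would be (sketch only; -k mirrors the test's insecure fallback, $HOST is the route hostname):

  until curl -ks "https://$HOST/search" | grep -q "MyTrackingId"; do
    echo "tracking ID not served yet; retrying"
    sleep 5
  done
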
logger.go:42: 14:14:49 | production/8-check-gaID | test step completed 8-check-gaID logger.go:42: 14:14:49 | production | production events from ns kuttl-test-fond-sunbird: logger.go:42: 14:14:49 | production | 2023-10-02 14:13:31 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7c6ff99 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl replicaset-controller logger.go:42: 14:14:49 | production | 2023-10-02 14:13:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl Scheduled Successfully assigned kuttl-test-fond-sunbird/elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl to ip-10-0-44-255.ec2.internal default-scheduler logger.go:42: 14:14:49 | production | 2023-10-02 14:13:31 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestfondsunbirdproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7c6ff99 to 1 deployment-controller logger.go:42: 14:14:49 | production | 2023-10-02 14:13:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl AddedInterface Add eth0 [10.131.0.79/23] from ovn-kubernetes logger.go:42: 14:14:49 | production | 2023-10-02 14:13:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 14:14:49 | production | 2023-10-02 14:13:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 14:14:49 | production | 2023-10-02 14:13:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 14:14:49 | production | 2023-10-02 14:13:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 14:14:49 | production | 2023-10-02 14:13:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 14:14:49 | production | 2023-10-02 14:13:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 14:14:49 | production | 2023-10-02 14:13:42 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 14:14:49 | production | 2023-10-02 14:13:47 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfondsunbirdproductionui-1-5bd7ccwxsl.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 14:14:49 | production | 2023-10-02 14:13:58 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8dxwc Scheduled Successfully assigned kuttl-test-fond-sunbird/production-ui-collector-7696dc959d-8dxwc to 
ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 14:14:49 | production | 2023-10-02 14:13:58 +0000 UTC Normal ReplicaSet.apps production-ui-collector-7696dc959d SuccessfulCreate Created pod: production-ui-collector-7696dc959d-8dxwc replicaset-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:13:58 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-7696dc959d to 1 deployment-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:13:58 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw Scheduled Successfully assigned kuttl-test-fond-sunbird/production-ui-query-54c89b74b9-8crcw to ip-10-0-123-159.ec2.internal default-scheduler
logger.go:42: 14:14:49 | production | 2023-10-02 14:13:58 +0000 UTC Normal ReplicaSet.apps production-ui-query-54c89b74b9 SuccessfulCreate Created pod: production-ui-query-54c89b74b9-8crcw replicaset-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:13:58 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-54c89b74b9 to 1 deployment-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8dxwc AddedInterface Add eth0 [10.129.2.117/23] from ovn-kubernetes
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8dxwc.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8dxwc.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8dxwc.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw AddedInterface Add eth0 [10.129.2.118/23] from ovn-kubernetes
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:00 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:15 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:15 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:15 +0000 UTC Normal Pod production-ui-query-54c89b74b9-8crcw.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:15 +0000 UTC Normal ReplicaSet.apps production-ui-query-54c89b74b9 SuccessfulDelete Deleted pod: production-ui-query-54c89b74b9-8crcw replicaset-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:15 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-54c89b74b9 to 0 from 1 deployment-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n Scheduled Successfully assigned kuttl-test-fond-sunbird/production-ui-query-7b84d848d-bpk5n to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n AddedInterface Add eth0 [10.128.2.62/23] from ovn-kubernetes
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:13be223107a317ed5d7d6e6d74d06a0e08aeedc09b5a0e0a1660d6dc8a3e8329" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal ReplicaSet.apps production-ui-query-7b84d848d SuccessfulCreate Created pod: production-ui-query-7b84d848d-bpk5n replicaset-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:16 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-7b84d848d to 1 deployment-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:31 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:31 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:31 +0000 UTC Normal Pod production-ui-query-7b84d848d-bpk5n.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:31 +0000 UTC Normal ReplicaSet.apps production-ui-query-7b84d848d SuccessfulDelete Deleted pod: production-ui-query-7b84d848d-bpk5n replicaset-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:31 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-7b84d848d to 0 from 1 deployment-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:32 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw Scheduled Successfully assigned kuttl-test-fond-sunbird/production-ui-query-79d4c8fbcb-6whnw to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:32 +0000 UTC Normal ReplicaSet.apps production-ui-query-79d4c8fbcb SuccessfulCreate Created pod: production-ui-query-79d4c8fbcb-6whnw replicaset-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:32 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-79d4c8fbcb to 1 deployment-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:33 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw AddedInterface Add eth0 [10.128.2.63/23] from ovn-kubernetes
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:33 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:33 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:33 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:33 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:33 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:33 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:42 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:42 +0000 UTC Normal Pod production-ui-query-79d4c8fbcb-6whnw.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:42 +0000 UTC Normal ReplicaSet.apps production-ui-query-79d4c8fbcb SuccessfulDelete Deleted pod: production-ui-query-79d4c8fbcb-6whnw replicaset-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:42 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-79d4c8fbcb to 0 from 1 deployment-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod production-ui-collector-7696dc959d-8dxwc horizontal-pod-autoscaler
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
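The HorizontalPodAutoscaler warnings above are expected noise in this phase: the collector pod has only just started (so the resource metrics API has nothing to report yet), and the query pods are being replaced while the test reconfigures the Jaeger instance. The "missing request for memory" variant additionally points at a container without a declared memory request, which the HPA needs to compute utilization. A minimal triage sketch, assuming the kuttl namespace still exists when run; both commands are standard kubectl, nothing here comes from the harness itself:

    # Show the HPA's conditions and the exact metric errors it reports.
    kubectl -n kuttl-test-fond-sunbird describe hpa production-ui-collector
    # Check whether the collector container declares CPU/memory requests,
    # which utilization-based HPA metrics require.
    kubectl -n kuttl-test-fond-sunbird get pod production-ui-collector-7696dc959d-8dxwc \
      -o jsonpath='{.spec.containers[*].resources}'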
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:43 +0000 UTC Normal Pod production-ui-query-8fb8448bc-gqjk8 Scheduled Successfully assigned kuttl-test-fond-sunbird/production-ui-query-8fb8448bc-gqjk8 to ip-10-0-3-54.ec2.internal default-scheduler
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:43 +0000 UTC Normal ReplicaSet.apps production-ui-query-8fb8448bc SuccessfulCreate Created pod: production-ui-query-8fb8448bc-gqjk8 replicaset-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:43 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-8fb8448bc to 1 deployment-controller
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:44 +0000 UTC Normal Pod production-ui-query-8fb8448bc-gqjk8 AddedInterface Add eth0 [10.128.2.64/23] from ovn-kubernetes
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:44 +0000 UTC Normal Pod production-ui-query-8fb8448bc-gqjk8.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:44 +0000 UTC Normal Pod production-ui-query-8fb8448bc-gqjk8.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:44 +0000 UTC Normal Pod production-ui-query-8fb8448bc-gqjk8.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:44 +0000 UTC Normal Pod production-ui-query-8fb8448bc-gqjk8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:44 +0000 UTC Normal Pod production-ui-query-8fb8448bc-gqjk8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | 2023-10-02 14:14:44 +0000 UTC Normal Pod production-ui-query-8fb8448bc-gqjk8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 14:14:49 | production | Deleting namespace: kuttl-test-fond-sunbird
=== CONT kuttl/harness/artifacts
logger.go:42: 14:14:56 | artifacts | Creating namespace: kuttl-test-profound-mallard
logger.go:42: 14:14:56 | artifacts | artifacts events from ns kuttl-test-profound-mallard:
logger.go:42: 14:14:56 | artifacts | Deleting namespace: kuttl-test-profound-mallard
=== CONT kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (146.77s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/allinone (49.64s)
        --- PASS: kuttl/harness/production (90.80s)
        --- PASS: kuttl/harness/artifacts (6.29s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml
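Each suite ends with the same post-processing step: kuttl writes ./artifacts/kuttl-report.xml, and junitcli rewrites it into the suite-named report under /logs/artifacts that the failure check at the end of the run consumes. The debug lines that follow show the normalization it applies, which for the names shown amounts to replacing the / separator with _. A rough stand-in for that renaming, for illustration only (junitcli itself does more, e.g. dropping the 'artifacts' TestCase):

    # Hypothetical equivalent of the name normalization seen in the debug output.
    printf '%s\n' ui/allinone ui/production ui/artifacts | tr '/' '_'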
level=debug msg="Setting a new name for the test suites" time="2023-10-02T14:15:03Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-10-02T14:15:03Z" level=debug msg="normalizing test case names" time="2023-10-02T14:15:03Z" level=debug msg="ui/allinone -> ui_allinone" time="2023-10-02T14:15:03Z" level=debug msg="ui/production -> ui_production" time="2023-10-02T14:15:03Z" level=debug msg="ui/artifacts -> ui_artifacts" +---------------+--------+ | NAME | RESULT | +---------------+--------+ | ui_allinone | passed | | ui_production | passed | | ui_artifacts | passed | +---------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 3 -gt 0 ']' + count=2 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/sidecar.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/streaming.xml + '[' 3 -gt 0 ']' + count=3 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/ui.xml + '[' 0 -gt 0 ']' + '[' 3 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true + '[' 3 -ne 3 ']' + test_suite_name=upgrade + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/upgrade.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-upgrade make[2]: Entering directory '/tmp/jaeger-tests' make docker JAEGER_VERSION=1.49.1 IMG="quay.io//jaeger-operator:next" make[3]: Entering directory '/tmp/jaeger-tests' [ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.49.0" --build-arg=JAEGER_VERSION=1.49.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2023-10-02T14:15:03Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" . make[3]: Leaving directory '/tmp/jaeger-tests' touch build-e2e-upgrade-image SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.49.0" JAEGER_OPERATOR_VERSION="1.49.0" JAEGER_VERSION="1.49.0" ./tests/e2e/upgrade/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-09-29-231104 True False 100m Cluster version is 4.14.0-0.nightly-2023-09-29-231104' ++ IS_OPENSHIFT=false ++ '[' '!' 
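As with the elasticsearch suite earlier in the run, the render script decides whether it is targeting OpenShift by probing for the ClusterVersion resource, which only exists on OpenShift. A hedged reconstruction of the logic traced above (variable names match the trace; the stderr redirect and the bare function call are illustrative, since the real script wraps this in helpers):

    # If `kubectl get clusterversion` returns anything, assume OpenShift.
    IS_OPENSHIFT=false
    output=$(kubectl get clusterversion 2>/dev/null)
    if [ ! -z "$output" ]; then
        warning 'Generating templates for an OpenShift cluster'  # the harness helper that prints the WAR: line
        IS_OPENSHIFT=true
    fi
    export IS_OPENSHIFT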
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/upgrade/render.sh
++ export SUITE_DIR=./tests/e2e/upgrade
++ SUITE_DIR=./tests/e2e/upgrade
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/upgrade
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
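The render script stages every test directory into a scratch _build tree so that generated manifests never touch the checked-in tests. Reassembled from the trace above (the find/xargs pair was split across log lines; this is the same pipeline written as one command):

    # Copy every top-level test directory except _build itself into _build.
    rm -rf _build && mkdir _build
    find -maxdepth 1 -type d ! -wholename . ! -wholename ./_build \
      | xargs -I '{}' cp -r '{}' _build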
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ export JAEGER_NAME
+ '[' true = true ']'
+ skip_test upgrade 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade
+ warning 'upgrade: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m'
WAR: upgrade: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test upgrade-from-latest-release 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade-from-latest-release
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade-from-latest-release
+ warning 'upgrade-from-latest-release: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m'
WAR: upgrade-from-latest-release: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
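Both upgrade tests are removed rather than executed: on OpenShift the harness calls skip_test, which deletes the rendered test directory from _build and prints a warning, leaving only the placeholder artifacts test to run. A minimal reconstruction consistent with the trace (the real helper lives in the harness's shared shell utilities; this sketch assumes it is invoked from inside _build):

    skip_test() {
        # The trace's '[' 2 -ne 2 ']' check: exactly two arguments expected.
        [ $# -ne 2 ] && { echo 'skip_test expects 2 arguments' >&2; return 1; }
        test_name=$1
        message=$2
        # The trace's '[' _build '!=' _build ']' guard: only act inside _build.
        [ "$(basename "$(pwd)")" != _build ] && { echo 'not in _build' >&2; return 1; }
        rm -rf "$test_name"
        echo -e "\e[1;33mWAR: ${test_name}: ${message}\e[0m"
    }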
+ echo 'Running upgrade E2E tests'
Running upgrade E2E tests
+ cd tests/e2e/upgrade/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3595368357
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-69k515z0-a5461.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 420 seconds for each step
    harness.go:372: testsuite: . has 1 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT kuttl/harness/artifacts
logger.go:42: 14:15:04 | artifacts | Creating namespace: kuttl-test-pet-dassie
logger.go:42: 14:15:04 | artifacts | artifacts events from ns kuttl-test-pet-dassie:
logger.go:42: 14:15:04 | artifacts | Deleting namespace: kuttl-test-pet-dassie
=== CONT kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (6.10s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.05s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2023-10-02T14:15:10Z" level=debug msg="Setting a new name for the test suites"
time="2023-10-02T14:15:10Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-10-02T14:15:10Z" level=debug msg="normalizing test case names"
time="2023-10-02T14:15:10Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 3 -gt 0 ']'
+ count=3
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/ui.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/upgrade.xml
+ '[' 0 -gt 0 ']'
+ '[' 3 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
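The run ends the same way the ui suite did: every per-suite JUnit report under $ARTIFACT_DIR is scanned for failure messages, and the job only fails if more suites than the allowed threshold contain one. A reconstruction of the loop as traced, with the threshold inferred from the final '[' 3 -gt 3 ']' test (the real script may differ in details such as the failing exit path):

    count=0
    for file in $ARTIFACT_DIR/*; do
        # grep -c prints how many lines of the report contain 'failure message';
        # running it inside the test keeps a zero count from tripping set -e.
        if [ "$(grep -c 'failure message' "$file")" -gt 0 ]; then
            count=$((count + 1))
        fi
    done
    # In this run three suites (elasticsearch, examples, streaming) contain
    # failures, which does not exceed the threshold of 3, so the job exits 0.
    [ "$count" -gt 3 ] && exit 1
    exit 0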