Installing kuttl Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64 KUBECONFIG file is: /tmp/kubeconfig-1500832312 for suite in elasticsearch examples generate miscellaneous sidecar streaming ui upgrade; do \ make run-e2e-tests-$suite ; \ done make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true + '[' 3 -ne 3 ']' + test_suite_name=elasticsearch + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/elasticsearch.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-elasticsearch make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true \ KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ ./tests/e2e/elasticsearch/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 6m31s Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 6m31s Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/elasticsearch/render.sh ++ export SUITE_DIR=./tests/e2e/elasticsearch ++ SUITE_DIR=./tests/e2e/elasticsearch ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + start_test es-from-aio-to-production + '[' 1 -ne 1 ']' + test_name=es-from-aio-to-production + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-from-aio-to-production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m' Rendering files for test es-from-aio-to-production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-from-aio-to-production + cd es-from-aio-to-production + jaeger_name=my-jaeger + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 03 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=03 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i 
'.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml + render_smoke_test my-jaeger true 04 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test es-increasing-replicas + '[' 1 -ne 1 ']' + test_name=es-increasing-replicas + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-increasing-replicas' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m' Rendering files for test es-increasing-replicas + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production + '[' es-from-aio-to-production '!=' _build ']' + cd .. + mkdir -p es-increasing-replicas + cd es-increasing-replicas + jaeger_name=simple-prod + '[' true = true ']' + jaeger_deployment_mode=production_autoprovisioned + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml + cp ./01-assert.yaml ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml + render_smoke_test simple-prod true 03 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=03 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + 
JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + cp ./02-install.yaml ./04-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml + /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml + '[' true = true ']' + skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas + '[' es-increasing-replicas '!=' _build ']' + cd .. + rm -rf es-index-cleaner-upstream + warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_index_cleaner -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-index-cleaner-autoprov + '[' 1 -ne 1 ']' + test_name=es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-index-cleaner-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m' Rendering files for test es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-index-cleaner-autoprov + cd es-index-cleaner-autoprov + jaeger_name=test-es-index-cleaner-with-prefix + cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md . 
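The trace above shows the render script flipping ELASTICSEARCH_URL to https and marking the connection as secured whenever the deployment strategy is neither production nor production_managed_es. A minimal sketch of that branch, with variable names reconstructed from the xtrace output rather than taken from the verified script source:

    # Sketch inferred from the xtrace above; names are assumptions, not the real source
    secured_es_connection=false
    if [ "$jaeger_deployment_strategy" != "production" ] && \
       [ "$jaeger_deployment_strategy" != "production_managed_es" ]; then
        # autoprovisioned Elasticsearch on OpenShift is reached over TLS
        ELASTICSEARCH_URL=https://elasticsearch
        secured_es_connection=true
    fi
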
+ render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml + render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02 + '[' 6 -ne 6 ']' + jaeger=test-es-index-cleaner-with-prefix + is_secured=true + number_of_spans=5 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=5 + DAYS=5 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + sed 's~enabled: false~enabled: true~gi' ./01-install.yaml + CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml + /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=00 + test_step=06 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', 
'\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=test-es-index-cleaner-with-prefix-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.7.6 ++ version_ge 5.7.6 5.4 +++ echo 5.7.6 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.7.6 == 5.7.6 + '[' -n '' ']' + skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov + '[' es-index-cleaner-autoprov '!=' _build ']' + cd .. 
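The operator-version gate visible above is a sort -V pipeline: the candidate versions are split onto separate lines, version-sorted, and the head of the list is compared against the first argument. The helper pair below is an inference from the xtrace fragments (echo | tr | sort -rV | head for version_ge, the non-reversed sort for version_le), not the verified script source:

    # Inferred from the xtrace; succeeds when $1 >= $2
    version_ge() {
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" == "$1"
    }
    # Inferred counterpart; succeeds when $1 <= $2
    version_le() {
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }
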
+ rm -rf es-index-cleaner-managed + warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + start_test es-multiinstance + '[' 1 -ne 1 ']' + test_name=es-multiinstance + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-multiinstance' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m' Rendering files for test es-multiinstance + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-multiinstance + cd es-multiinstance + jaeger_name=instance-1 + render_install_jaeger instance-1 production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=instance-1 + JAEGER_NAME=instance-1 + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml + '[' true = true ']' + skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-rollover-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance + '[' es-multiinstance '!=' _build ']' + cd .. 
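Every skipped test in this suite follows the same visible pattern: step back to the _build root if needed, delete the rendered test folder, and emit a yellow WAR line. A sketch of that helper pair, reconstructed from the repeated trace fragments (the argument-count guards mirror the '[' N -ne N ']' checks in the trace; exact error handling is an assumption):

    # Reconstructed from the repeated trace pattern; not the verified source
    warning() {
        [ $# -ne 1 ] && return 1
        echo -e "\e[1;33mWAR: $1\e[0m"
    }
    skip_test() {
        [ $# -ne 2 ] && return 1
        test_name=$1
        message=$2
        # return to the _build root if we are still inside a test folder
        if [ "$(basename "$(pwd)")" != "_build" ]; then cd ..; fi
        rm -rf "$test_name"
        warning "$test_name: $message"
    }
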
+ rm -rf es-rollover-upstream + warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_rollover -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-rollover-autoprov + '[' 1 -ne 1 ']' + test_name=es-rollover-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-rollover-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m' Rendering files for test es-rollover-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-rollover-autoprov + cd es-rollover-autoprov + cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md . + jaeger_name=my-jaeger + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_report_spans my-jaeger true 2 00 true 02 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset 
JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=00 + test_step=03 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=01 + test_step=04 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=01 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml + JOB_NUMBER=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml + render_report_spans my-jaeger true 2 02 true 06 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=02 + ensure_reported_spans=true + test_step=06 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=02 + JOB_NUMBER=02 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export 
JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=02 + test_step=07 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=02 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml + JOB_NUMBER=02 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + job_number=03 + test_step=08 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=03 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml + JOB_NUMBER=03 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + job_number=04 + 
test_step=09 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=04 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml + JOB_NUMBER=04 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml + render_report_spans my-jaeger true 2 03 true 10 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=03 + ensure_reported_spans=true + test_step=10 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=03 + JOB_NUMBER=03 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + CRONJOB_NAME=my-jaeger-es-rollover + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'',' + job_number=05 + test_step=11 + escape_command ''\''--name'\'', '\''jaeger-span-000002'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-000002'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-000002'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=05 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml + JOB_NUMBER=05 + /tmp/jaeger-tests/bin/gomplate -f 
/tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + job_number=06 + test_step=12 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=06 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml + JOB_NUMBER=06 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.6"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.7.6 ++ version_ge 5.7.6 5.4 +++ echo 5.7.6 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.7.6 == 5.7.6 + '[' -n '' ']' + skip_test es-rollover-managed 'Test only supported with Elasticsearch OpenShift 
Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-rollover-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov + '[' es-rollover-autoprov '!=' _build ']' + cd .. + rm -rf es-rollover-managed + warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + skip_test es-spark-dependencies 'This test is not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=es-spark-dependencies + message='This test is not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + rm -rf es-spark-dependencies + warning 'es-spark-dependencies: This test is not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m' WAR: es-spark-dependencies: This test is not supported in OpenShift + [[ true = true ]] + [[ false = false ]] + start_test es-streaming-autoprovisioned + '[' 1 -ne 1 ']' + test_name=es-streaming-autoprovisioned + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-streaming-autoprovisioned' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-streaming-autoprovisioned\e[0m' Rendering files for test es-streaming-autoprovisioned + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-streaming-autoprovisioned + cd es-streaming-autoprovisioned + jaeger_name=auto-provisioned + render_assert_kafka true auto-provisioned 00 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=00 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + 
CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_smoke_test auto-provisioned true 04 + '[' 3 -ne 3 ']' + jaeger=auto-provisioned + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 + export JAEGER_NAME=auto-provisioned + JAEGER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running elasticsearch E2E tests' Running elasticsearch E2E tests + cd tests/e2e/elasticsearch/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1500832312 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 8 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/es-from-aio-to-production === PAUSE kuttl/harness/es-from-aio-to-production === RUN kuttl/harness/es-increasing-replicas === PAUSE kuttl/harness/es-increasing-replicas === RUN kuttl/harness/es-index-cleaner-autoprov === PAUSE kuttl/harness/es-index-cleaner-autoprov === RUN kuttl/harness/es-multiinstance === PAUSE kuttl/harness/es-multiinstance === RUN kuttl/harness/es-rollover-autoprov === PAUSE kuttl/harness/es-rollover-autoprov === RUN kuttl/harness/es-simple-prod === PAUSE kuttl/harness/es-simple-prod === RUN kuttl/harness/es-streaming-autoprovisioned === PAUSE kuttl/harness/es-streaming-autoprovisioned === CONT kuttl/harness/artifacts logger.go:42: 07:33:09 | artifacts | Creating namespace: kuttl-test-central-cobra logger.go:42: 07:33:09 | artifacts | artifacts events from ns kuttl-test-central-cobra: logger.go:42: 07:33:09 | artifacts | Deleting namespace: kuttl-test-central-cobra === CONT kuttl/harness/es-simple-prod logger.go:42: 07:33:15 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:33:15 | es-simple-prod | Creating namespace: kuttl-test-smiling-humpback logger.go:42: 07:33:15 | es-simple-prod | es-simple-prod events from ns kuttl-test-smiling-humpback: logger.go:42: 07:33:15 | es-simple-prod | Deleting namespace: kuttl-test-smiling-humpback === CONT kuttl/harness/es-streaming-autoprovisioned logger.go:42: 07:33:21 | es-streaming-autoprovisioned | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:33:21 | es-streaming-autoprovisioned | Creating namespace: kuttl-test-handy-ram logger.go:42: 07:33:21 | 
es-streaming-autoprovisioned/0-install | starting test step 0-install logger.go:42: 07:33:21 | es-streaming-autoprovisioned/0-install | Jaeger:kuttl-test-handy-ram/auto-provisioned created logger.go:42: 07:34:40 | es-streaming-autoprovisioned/0-install | test step completed 0-install logger.go:42: 07:34:40 | es-streaming-autoprovisioned/1- | starting test step 1- logger.go:42: 07:35:10 | es-streaming-autoprovisioned/1- | test step completed 1- logger.go:42: 07:35:10 | es-streaming-autoprovisioned/2- | starting test step 2- logger.go:42: 07:35:52 | es-streaming-autoprovisioned/2- | test step completed 2- logger.go:42: 07:35:52 | es-streaming-autoprovisioned/3- | starting test step 3- logger.go:42: 07:36:04 | es-streaming-autoprovisioned/3- | test step completed 3- logger.go:42: 07:36:04 | es-streaming-autoprovisioned/4-smoke-test | starting test step 4-smoke-test logger.go:42: 07:36:04 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provisioned /dev/null] logger.go:42: 07:36:07 | es-streaming-autoprovisioned/4-smoke-test | Warning: resource jaegers/auto-provisioned is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:36:13 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:36:14 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:36:15 | es-streaming-autoprovisioned/4-smoke-test | job.batch/report-span created logger.go:42: 07:36:15 | es-streaming-autoprovisioned/4-smoke-test | job.batch/check-span created logger.go:42: 07:36:28 | es-streaming-autoprovisioned/4-smoke-test | test step completed 4-smoke-test logger.go:42: 07:36:28 | es-streaming-autoprovisioned | es-streaming-autoprovisioned events from ns kuttl-test-handy-ram: logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:29 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb749d695 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2 replicaset-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2 Binding Scheduled Successfully assigned kuttl-test-handy-ram/elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:29 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesthandyramautoprovisioned-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb749d695 to 1 deployment-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:30 +0000 UTC Normal 
Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2 AddedInterface Add eth0 [10.128.2.23/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2.spec.containers{elasticsearch} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" in 6.449040078s (6.449053278s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" in 4.030220349s (4.030229889s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:33:50 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesthandyramautoprovisioned-1-7bb74kf6l2.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:02 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:02 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:03 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by 
system administrator persistentvolume-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:03 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-handy-ram/data-auto-provisioned-zookeeper-0" logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:06 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-dfbaf276-a01f-4b94-bb55-8ddcb8af0ad0 logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:07 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-handy-ram/auto-provisioned-zookeeper-0 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:09 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-dfbaf276-a01f-4b94-bb55-8ddcb8af0ad0" attachdetach-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:10 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.129.2.19/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:10 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:17 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" in 7.54254541s (7.54255945s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:17 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:17 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:41 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:41 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:41 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-handy-ram/data-0-auto-provisioned-kafka-0" logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:41 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:45 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned 
kuttl-test-handy-ram/auto-provisioned-kafka-0 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:45 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-9cd0b0bd-a08d-4ff9-992d-e481d768b270 logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:48 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-9cd0b0bd-a08d-4ff9-992d-e481d768b270" attachdetach-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:50 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.129.2.20/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:50 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:50 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:34:50 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z Binding Scheduled Successfully assigned kuttl-test-handy-ram/auto-provisioned-entity-operator-695c886bf6-sjd6z to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z AddedInterface Add eth0 [10.131.0.21/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{topic-operator} Pulling Pulling image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:12 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-695c886bf6 SuccessfulCreate Created pod: auto-provisioned-entity-operator-695c886bf6-sjd6z replicaset-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:12 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-695c886bf6 to 1 deployment-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{topic-operator} Pulled Successfully pulled image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" in 8.28290128s (8.282916931s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:20 +0000 UTC Normal Pod 
auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:20 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:21 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:21 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:21 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{tls-sidecar} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:29 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{tls-sidecar} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" in 8.848193549s (8.848203859s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:30 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:30 +0000 UTC Normal Pod auto-provisioned-entity-operator-695c886bf6-sjd6z.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Pod auto-provisioned-collector-f5f77c7f9-h5vrr Binding Scheduled Successfully assigned kuttl-test-handy-ram/auto-provisioned-collector-f5f77c7f9-h5vrr to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Pod auto-provisioned-collector-f5f77c7f9-h5vrr AddedInterface Add eth0 [10.129.2.21/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Pod auto-provisioned-collector-f5f77c7f9-h5vrr.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-f5f77c7f9 SuccessfulCreate Created pod: auto-provisioned-collector-f5f77c7f9-h5vrr replicaset-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-f5f77c7f9 to 1 deployment-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Pod auto-provisioned-ingester-5b4854b5f8-lsnkr Binding 
Scheduled Successfully assigned kuttl-test-handy-ram/auto-provisioned-ingester-5b4854b5f8-lsnkr to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Pod auto-provisioned-ingester-5b4854b5f8-lsnkr AddedInterface Add eth0 [10.129.2.22/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Pod auto-provisioned-ingester-5b4854b5f8-lsnkr.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-5b4854b5f8 SuccessfulCreate Created pod: auto-provisioned-ingester-5b4854b5f8-lsnkr replicaset-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-5b4854b5f8 to 1 deployment-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj Binding Scheduled Successfully assigned kuttl-test-handy-ram/auto-provisioned-query-fffdcc7b7-nztcj to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj AddedInterface Add eth0 [10.131.0.22/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-fffdcc7b7 SuccessfulCreate Created pod: auto-provisioned-query-fffdcc7b7-nztcj replicaset-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:54 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-fffdcc7b7 to 1 deployment-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:58 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" in 3.954073346s (3.954087247s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:58 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:58 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:58 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine 
kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:58 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:58 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:35:58 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:00 +0000 UTC Normal Pod auto-provisioned-collector-f5f77c7f9-h5vrr.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" in 5.443566774s (5.443591465s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:00 +0000 UTC Normal Pod auto-provisioned-collector-f5f77c7f9-h5vrr.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:00 +0000 UTC Normal Pod auto-provisioned-collector-f5f77c7f9-h5vrr.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:00 +0000 UTC Normal Pod auto-provisioned-ingester-5b4854b5f8-lsnkr.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" in 5.396044012s (5.396051853s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:00 +0000 UTC Normal Pod auto-provisioned-ingester-5b4854b5f8-lsnkr.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:00 +0000 UTC Normal Pod auto-provisioned-ingester-5b4854b5f8-lsnkr.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:02 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" in 3.476091532s (3.476108302s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:02 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:02 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:09 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:09 +0000 UTC Normal Pod 
auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:09 +0000 UTC Normal Pod auto-provisioned-query-fffdcc7b7-nztcj.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:09 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-fffdcc7b7 SuccessfulDelete Deleted pod: auto-provisioned-query-fffdcc7b7-nztcj replicaset-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:09 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled down replica set auto-provisioned-query-fffdcc7b7 to 0 from 1 deployment-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr Binding Scheduled Successfully assigned kuttl-test-handy-ram/auto-provisioned-query-f6b4fd5d-5dfrr to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr AddedInterface Add eth0 [10.131.0.23/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Normal Pod 
auto-provisioned-query-f6b4fd5d-5dfrr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-f6b4fd5d SuccessfulCreate Created pod: auto-provisioned-query-f6b4fd5d-5dfrr replicaset-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:10 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-f6b4fd5d to 1 deployment-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:11 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:11 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:11 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:11 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:11 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:11 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:11 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:11 +0000 UTC Normal Pod auto-provisioned-query-f6b4fd5d-5dfrr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:15 +0000 UTC Normal Pod check-span-znptk Binding Scheduled Successfully assigned kuttl-test-handy-ram/check-span-znptk to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:15 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-znptk job-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:15 +0000 UTC Normal Pod report-span-fvksq Binding Scheduled Successfully assigned kuttl-test-handy-ram/report-span-fvksq to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:15 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-fvksq job-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 
2023-10-09 07:36:16 +0000 UTC Normal Pod check-span-znptk AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:16 +0000 UTC Normal Pod check-span-znptk.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:16 +0000 UTC Normal Pod report-span-fvksq AddedInterface Add eth0 [10.129.2.23/23] from ovn-kubernetes logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:16 +0000 UTC Normal Pod report-span-fvksq.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:18 +0000 UTC Normal Pod check-span-znptk.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" in 1.754098766s (1.754113806s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:18 +0000 UTC Normal Pod check-span-znptk.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:18 +0000 UTC Normal Pod check-span-znptk.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:18 +0000 UTC Normal Pod report-span-fvksq.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" in 1.788268735s (1.788279144s including waiting) kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:18 +0000 UTC Normal Pod report-span-fvksq.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:18 +0000 UTC Normal Pod report-span-fvksq.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:36:28 | es-streaming-autoprovisioned | 2023-10-09 07:36:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:36:28 | es-streaming-autoprovisioned | Deleting namespace: kuttl-test-handy-ram === CONT kuttl/harness/es-index-cleaner-autoprov logger.go:42: 07:37:09 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:37:09 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-patient-rabbit logger.go:42: 07:37:09 | es-index-cleaner-autoprov/1-install | starting test step 1-install logger.go:42: 07:37:09 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-patient-rabbit/test-es-index-cleaner-with-prefix created logger.go:42: 07:37:46 | es-index-cleaner-autoprov/1-install | test step completed 1-install logger.go:42: 07:37:46 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans logger.go:42: 07:37:46 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh 
$NAMESPACE test-es-index-cleaner-with-prefix /dev/null] logger.go:42: 07:37:47 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:37:54 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml] logger.go:42: 07:37:55 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE] logger.go:42: 07:37:55 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created logger.go:42: 07:38:34 | es-index-cleaner-autoprov/2-report-spans | test step completed 2-report-spans logger.go:42: 07:38:34 | es-index-cleaner-autoprov/3-install | starting test step 3-install logger.go:42: 07:38:34 | es-index-cleaner-autoprov/3-install | Jaeger:kuttl-test-patient-rabbit/test-es-index-cleaner-with-prefix updated logger.go:42: 07:38:34 | es-index-cleaner-autoprov/3-install | test step completed 3-install logger.go:42: 07:38:34 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | starting test step 4-wait-es-index-cleaner logger.go:42: 07:38:34 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob test-es-index-cleaner-with-prefix-es-index-cleaner --namespace $NAMESPACE] logger.go:42: 07:38:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-09T07:38:46Z" level=debug msg="Checking if the test-es-index-cleaner-with-prefix-es-index-cleaner CronJob exists" logger.go:42: 07:38:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-09T07:38:46Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 07:38:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-09T07:38:46Z" level=info msg="Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner found successfully" logger.go:42: 07:38:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-09T07:38:46Z" level=debug msg="Waiting for the next scheduled job from test-es-index-cleaner-with-prefix-es-index-cleaner cronjob" logger.go:42: 07:38:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-09T07:38:46Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 07:38:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-09T07:38:46Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 07:38:56 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-09T07:38:56Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 07:39:06 | 
es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-09T07:39:06Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed" logger.go:42: 07:39:16 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-10-09T07:39:16Z" level=info msg="Job of owner test-es-index-cleaner-with-prefix-es-index-cleaner succeeded after test-es-index-cleaner-with-prefix-es-index-cleaner 30.021697773s" logger.go:42: 07:39:16 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | test step completed 4-wait-es-index-cleaner logger.go:42: 07:39:16 | es-index-cleaner-autoprov/5-install | starting test step 5-install logger.go:42: 07:39:16 | es-index-cleaner-autoprov/5-install | Jaeger:kuttl-test-patient-rabbit/test-es-index-cleaner-with-prefix updated logger.go:42: 07:39:16 | es-index-cleaner-autoprov/5-install | test step completed 5-install logger.go:42: 07:39:16 | es-index-cleaner-autoprov/6-check-indices | starting test step 6-check-indices logger.go:42: 07:39:16 | es-index-cleaner-autoprov/6-check-indices | Job:kuttl-test-patient-rabbit/00-check-indices created logger.go:42: 07:39:20 | es-index-cleaner-autoprov/6-check-indices | test step completed 6-check-indices logger.go:42: 07:39:20 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-patient-rabbit: logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f Binding Scheduled Successfully assigned kuttl-test-patient-rabbit/elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:16 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-6589497b5 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f replicaset-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:16 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-6589497b5 to 1 deployment-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f AddedInterface Add eth0 [10.128.2.24/23] from ovn-kubernetes logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f.spec.containers{proxy} Pulled Container image 
"registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:27 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpatientrabbittestesindexcle-1-655j6f.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-dd8c6 Binding Scheduled Successfully assigned kuttl-test-patient-rabbit/test-es-index-cleaner-with-prefix-collector-8659b69c48-dd8c6 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-dd8c6 AddedInterface Add eth0 [10.129.2.25/23] from ovn-kubernetes logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:43 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-8659b69c48 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-8659b69c48-dd8c6 replicaset-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:43 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-8659b69c48 to 1 deployment-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8 Binding Scheduled Successfully assigned kuttl-test-patient-rabbit/test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8 to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8 AddedInterface Add eth0 [10.131.0.24/23] from ovn-kubernetes logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:43 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-69467c6fcc SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8 replicaset-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:43 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-69467c6fcc to 1 deployment-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-dd8c6.spec.containers{jaeger-collector} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-dd8c6.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-dd8c6.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h Binding Scheduled Successfully assigned kuttl-test-patient-rabbit/test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:50 +0000 UTC Normal ReplicaSet.apps 
test-es-index-cleaner-with-prefix-query-57749f4855 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h replicaset-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:50 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-69467c6fcc SuccessfulDelete Deleted pod: test-es-index-cleaner-with-prefix-query-69467c6fcc-mzbx8 replicaset-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:50 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled down replica set test-es-index-cleaner-with-prefix-query-69467c6fcc to 0 from 1 deployment-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:50 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-57749f4855 to 1 deployment-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h AddedInterface Add eth0 [10.131.0.25/23] from ovn-kubernetes logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 
07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-57749f4855-zzd8h.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:55 +0000 UTC Normal Pod 00-report-span-ng9fz Binding Scheduled Successfully assigned kuttl-test-patient-rabbit/00-report-span-ng9fz to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:55 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-ng9fz job-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:56 +0000 UTC Normal Pod 00-report-span-ng9fz AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:56 +0000 UTC Normal Pod 00-report-span-ng9fz.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:56 +0000 UTC Normal Pod 00-report-span-ng9fz.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:37:56 +0000 UTC Normal Pod 00-report-span-ng9fz.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:38:01 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:38:01 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:38:01 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:38:34 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:38:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector 
FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:38:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-8659b69c48-dd8c6 horizontal-pod-autoscaler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:38:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28280619 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-2828061n7s42 job-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2828061n7s42 Binding Scheduled Successfully assigned kuttl-test-patient-rabbit/test-es-index-cleaner-with-prefix-es-index-cleaner-2828061n7s42 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2828061n7s42 AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2828061n7s42.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:8ac1b958ff16ea16f4d0c7132e3d369848a829d6655e0b2338a9bef93d54f02d" kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-28280619 cronjob-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2828061n7s42.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:8ac1b958ff16ea16f4d0c7132e3d369848a829d6655e0b2338a9bef93d54f02d" in 3.800363662s (3.800375492s including waiting) kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2828061n7s42.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2828061n7s42.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 
07:39:07 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28280619 Completed Job completed job-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:07 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-28280619, status: Complete cronjob-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:16 +0000 UTC Normal Pod 00-check-indices-c6m9t Binding Scheduled Successfully assigned kuttl-test-patient-rabbit/00-check-indices-c6m9t to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:16 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-c6m9t job-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:17 +0000 UTC Normal Pod 00-check-indices-c6m9t AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:17 +0000 UTC Normal Pod 00-check-indices-c6m9t.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:17 +0000 UTC Normal Pod 00-check-indices-c6m9t.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:17 +0000 UTC Normal Pod 00-check-indices-c6m9t.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:39:20 | es-index-cleaner-autoprov | 2023-10-09 07:39:20 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 07:39:20 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-patient-rabbit === CONT kuttl/harness/es-rollover-autoprov logger.go:42: 07:39:27 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:39:27 | es-rollover-autoprov | Creating namespace: kuttl-test-deciding-kitten logger.go:42: 07:39:27 | es-rollover-autoprov/1-install | starting test step 1-install logger.go:42: 07:39:27 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-deciding-kitten/my-jaeger created logger.go:42: 07:40:20 | es-rollover-autoprov/1-install | test step completed 1-install logger.go:42: 07:40:20 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans logger.go:42: 07:40:20 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:40:21 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
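Note: the kubectl warning above (also seen in the earlier es-index-cleaner-autoprov run) is expected rather than an error. The Jaeger CR is created without the kubectl.kubernetes.io/last-applied-configuration annotation, and the get-token.sh helper then drives it through what appears to be kubectl apply, which back-fills the annotation automatically. A minimal sketch of how the warning can be avoided when creating objects by hand; jaeger.yaml here is a hypothetical manifest, not a file from this suite:

  # Record last-applied state at creation time so a later "kubectl apply" is clean:
  kubectl create -f jaeger.yaml --save-config
  # Or retrofit the annotation onto an object that was created without it:
  kubectl apply set-last-applied -f jaeger.yaml --create-annotation=true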
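Note: the report-spans steps in both tests follow the same render-then-apply pattern: gomplate expands a Job template, with the collector and query endpoints passed in as environment variables, and the rendered manifest is applied into the kuttl namespace. A condensed sketch of that pattern using the values logged above (ASSERT_IMG is omitted, NAMESPACE is injected by kuttl, and the trailing wait is illustrative; the suite itself relies on kuttl asserts rather than kubectl wait):

  # Render the report-span Job from the template, then apply it:
  DAYS=2 JOB_NUMBER=00 \
  JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 \
  JAEGER_QUERY_ENDPOINT=https://my-jaeger-query \
  MOUNT_SECRET=e2e-test \
    /tmp/jaeger-tests/bin/gomplate \
    -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template \
    -o report-span-00-job.yaml
  kubectl apply -f report-span-00-job.yaml -n "$NAMESPACE"
  kubectl wait --for=condition=complete job/00-report-span -n "$NAMESPACE" --timeout=120s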
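Note: steps such as 4-wait-es-index-cleaner and 11-check-indices shell out to cmd-utils/wait-cronjob/main.go, which first checks that the CronJob exists and then polls on a ten-second interval (visible in the 07:38:46 / 07:38:56 / 07:39:06 timestamps) until a Job spawned by it succeeds; the doubled "Waiting for next job ... to succeed" debug line and the repeated name in "Job of owner ... succeeded after ..." are quirks of that tool's logging, not corruption in this transcript. A rough kubectl equivalent of the condition it waits for (the label selector is an illustrative assumption; the tool actually matches Jobs through their owner reference, and kubectl wait errors out if no matching Job exists yet):

  # Fail fast if the CronJob was never created:
  kubectl get cronjob my-jaeger-es-rollover -n "$NAMESPACE"
  # Then block until a Job it owns completes (selector assumed for illustration):
  kubectl wait --for=condition=complete job \
    -l app.kubernetes.io/instance=my-jaeger -n "$NAMESPACE" --timeout=300s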
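Note: the HorizontalPodAutoscaler FailedGetResourceMetric and FailedComputeMetricsReplicas warnings scattered through these event dumps are benign at startup: the resource metrics API simply has no samples for the new pods yet. The later "missing request for memory in container jaeger-collector" variant additionally indicates that the collector container declares no memory request, so memory-based scaling can never be computed. If that ever needed fixing, the requests would belong on the Jaeger CR rather than on the Deployment, since the operator reconciles the Deployment; an illustrative patch, with made-up values and an assumed (but conventional) spec.collector.resources layout:

  # Illustrative only; resource values are invented and the CR field layout is an assumption.
  kubectl patch jaeger test-es-index-cleaner-with-prefix -n "$NAMESPACE" --type=merge \
    -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'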
logger.go:42: 07:40:28 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml] logger.go:42: 07:40:28 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE] logger.go:42: 07:40:28 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created logger.go:42: 07:40:53 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans logger.go:42: 07:40:53 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices logger.go:42: 07:40:53 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-deciding-kitten/00-check-indices created logger.go:42: 07:40:57 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices logger.go:42: 07:40:57 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices logger.go:42: 07:40:57 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-deciding-kitten/01-check-indices created logger.go:42: 07:41:01 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices logger.go:42: 07:41:01 | es-rollover-autoprov/5-install | starting test step 5-install logger.go:42: 07:41:01 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-deciding-kitten/my-jaeger updated logger.go:42: 07:41:08 | es-rollover-autoprov/5-install | test step completed 5-install logger.go:42: 07:41:08 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans logger.go:42: 07:41:08 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:41:15 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml] logger.go:42: 07:41:16 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE] logger.go:42: 07:41:16 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created logger.go:42: 07:41:39 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans logger.go:42: 07:41:39 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices logger.go:42: 07:41:39 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-deciding-kitten/02-check-indices created logger.go:42: 07:41:43 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices logger.go:42: 07:41:43 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices logger.go:42: 07:41:43 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-deciding-kitten/03-check-indices created logger.go:42: 07:41:46 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices logger.go:42: 07:41:46 
| es-rollover-autoprov/9-check-indices | starting test step 9-check-indices logger.go:42: 07:41:46 | es-rollover-autoprov/9-check-indices | Job:kuttl-test-deciding-kitten/04-check-indices created logger.go:42: 07:41:49 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices logger.go:42: 07:41:49 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans logger.go:42: 07:41:49 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:41:57 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml] logger.go:42: 07:41:57 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE] logger.go:42: 07:41:57 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created logger.go:42: 07:42:22 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans logger.go:42: 07:42:22 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices logger.go:42: 07:42:22 | es-rollover-autoprov/11-check-indices | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE] logger.go:42: 07:42:23 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:42:23Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists" logger.go:42: 07:42:23 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:42:23Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 07:42:23 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:42:23Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully" logger.go:42: 07:42:23 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:42:23Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob" logger.go:42: 07:42:23 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:42:23Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 07:42:23 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:42:23Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 07:42:33 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:42:33Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 07:42:43 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:42:43Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 07:42:53 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:42:53Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 07:43:03 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:43:03Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 07:43:13 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:43:13Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded 
logger.go:42: 07:43:13 | es-rollover-autoprov/11-check-indices | time="2023-10-09T07:43:13Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after my-jaeger-es-rollover 50.028347957s"
logger.go:42: 07:43:13 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-deciding-kitten/05-check-indices created
logger.go:42: 07:43:17 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices
logger.go:42: 07:43:17 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices
logger.go:42: 07:43:17 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-deciding-kitten/06-check-indices created
logger.go:42: 07:43:21 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices
logger.go:42: 07:43:21 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-deciding-kitten:
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:51 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c884764b7 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm replicaset-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:51 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:51 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c884764b7 to 1 deployment-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm AddedInterface Add eth0 [10.128.2.25/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:39:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:02 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:07 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestdecidingkittenmyjaeger-1-c88476tf7xm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
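[Editor's note] The two Unhealthy events above are the normal Elasticsearch startup sequence, not a failure: the readiness probe polls the HTTP endpoint before the node is listening, and curl-style probes report 000 as the HTTP code when no response was received at all. A quick way to reproduce the same value (hypothetical host/port):

    # curl prints 000 for %{http_code} when the connection is refused,
    # which is what the probe reports until Elasticsearch starts listening.
    curl -sk -o /dev/null -w '%{http_code}\n' http://elasticsearch:9200/ || true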
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:17 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-99mvd Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-collector-558ccfc8dd-99mvd to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:17 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-99mvd replicaset-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:17 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:17 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4 Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-query-b958666d-njrj4 to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:17 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-b958666d SuccessfulCreate Created pod: my-jaeger-query-b958666d-njrj4 replicaset-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:17 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-b958666d to 1 deployment-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-99mvd AddedInterface Add eth0 [10.129.2.29/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-99mvd.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-99mvd.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-99mvd.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4 AddedInterface Add eth0 [10.131.0.26/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:18 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:24 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:24 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:24 +0000 UTC Normal Pod my-jaeger-query-b958666d-njrj4.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:24 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-b958666d SuccessfulDelete Deleted pod: my-jaeger-query-b958666d-njrj4 replicaset-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:24 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-b958666d to 0 from 1 deployment-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-query-7bfc4c6769-dp64b to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7bfc4c6769 SuccessfulCreate Created pod: my-jaeger-query-7bfc4c6769-dp64b replicaset-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:25 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7bfc4c6769 to 1 deployment-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:28 +0000 UTC Normal Pod 00-report-span-bzwwk Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/00-report-span-bzwwk to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:28 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-bzwwk job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:29 +0000 UTC Normal Pod 00-report-span-bzwwk AddedInterface Add eth0 [10.129.2.30/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:29 +0000 UTC Normal Pod 00-report-span-bzwwk.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:29 +0000 UTC Normal Pod 00-report-span-bzwwk.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:29 +0000 UTC Normal Pod 00-report-span-bzwwk.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:35 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:35 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:35 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:52 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:53 +0000 UTC Normal Pod 00-check-indices-8f8pr Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/00-check-indices-8f8pr to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:53 +0000 UTC Normal Pod 00-check-indices-8f8pr AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:53 +0000 UTC Normal Pod 00-check-indices-8f8pr.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:53 +0000 UTC Normal Pod 00-check-indices-8f8pr.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:53 +0000 UTC Normal Pod 00-check-indices-8f8pr.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:53 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-8f8pr job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:56 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller
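[Editor's note] The HorizontalPodAutoscaler warnings above recur throughout the run: immediately after a collector pod starts, the resource metrics API has no samples for it yet, and later events show the collector container also lacks a memory request, which the HPA needs to compute utilization. They are expected noise here, but a hedged fix would be explicit requests on the collector in the Jaeger CR (field path assumed from the Jaeger CR's common spec):

    # Hypothetical: give the collector explicit requests so the HPA can
    # compute cpu/memory utilization once metrics exist.
    kubectl patch jaeger my-jaeger -n "$NAMESPACE" --type=merge \
      -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'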
"registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:57 +0000 UTC Normal Pod 01-check-indices-jz4hp.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:57 +0000 UTC Normal Pod 01-check-indices-jz4hp.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:40:57 +0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-jz4hp job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:00 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:02 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-js4cr Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-es-rollover-create-mapping-js4cr to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:02 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-js4cr AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:02 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-js4cr.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:02 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-js4cr job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:04 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-js4cr.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" in 1.741125184s (1.741133994s including waiting) kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:04 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-js4cr.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:04 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-js4cr.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:05 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:05 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-558ccfc8dd-99mvd horizontal-pod-autoscaler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:06 +0000 UTC 
Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:07 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:08 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-99mvd.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:08 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulDelete Deleted pod: my-jaeger-collector-558ccfc8dd-99mvd replicaset-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:08 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-558ccfc8dd to 0 from 1 deployment-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:08 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:08 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:08 +0000 UTC Normal Pod my-jaeger-query-7bfc4c6769-dp64b.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:08 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7bfc4c6769 SuccessfulDelete Deleted pod: my-jaeger-query-7bfc4c6769-dp64b replicaset-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:08 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-7bfc4c6769 to 0 from 1 deployment-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-jq8ss Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-collector-74dd5d98f7-jq8ss to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-jq8ss AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-jq8ss.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-jq8ss.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-collector-74dd5d98f7-jq8ss.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-74dd5d98f7 SuccessfulCreate 
Created pod: my-jaeger-collector-74dd5d98f7-jq8ss replicaset-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-74dd5d98f7 to 1 deployment-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-query-69bb4fd956-vqsqq to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq AddedInterface Add eth0 [10.131.0.28/23] from ovn-kubernetes logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-69bb4fd956 SuccessfulCreate Created pod: my-jaeger-query-69bb4fd956-vqsqq replicaset-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:09 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-69bb4fd956 to 1 deployment-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:10 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:10 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:10 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:10 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:10 +0000 UTC Normal Pod my-jaeger-query-69bb4fd956-vqsqq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:16 +0000 UTC Normal Pod 02-report-span-kzwgp Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/02-report-span-kzwgp to 
ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:16 +0000 UTC Normal Pod 02-report-span-kzwgp AddedInterface Add eth0 [10.129.2.35/23] from ovn-kubernetes logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:16 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-kzwgp job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:17 +0000 UTC Normal Pod 02-report-span-kzwgp.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:17 +0000 UTC Normal Pod 02-report-span-kzwgp.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:17 +0000 UTC Normal Pod 02-report-span-kzwgp.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:39 +0000 UTC Normal Pod 02-check-indices-hppll Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/02-check-indices-hppll to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:39 +0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-hppll job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:39 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:40 +0000 UTC Normal Pod 02-check-indices-hppll AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:40 +0000 UTC Normal Pod 02-check-indices-hppll.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:40 +0000 UTC Normal Pod 02-check-indices-hppll.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:40 +0000 UTC Normal Pod 02-check-indices-hppll.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:43 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:43 +0000 UTC Normal Pod 03-check-indices-l4nkt Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/03-check-indices-l4nkt to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:43 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-l4nkt job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:44 +0000 UTC Normal Pod 03-check-indices-l4nkt AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:44 +0000 UTC Normal Pod 03-check-indices-l4nkt.spec.containers{asserts-container} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:44 +0000 UTC Normal Pod 03-check-indices-l4nkt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:44 +0000 UTC Normal Pod 03-check-indices-l4nkt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:46 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:46 +0000 UTC Normal Pod 04-check-indices-vhggs Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/04-check-indices-vhggs to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:46 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-vhggs job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:47 +0000 UTC Normal Pod 04-check-indices-vhggs AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:47 +0000 UTC Normal Pod 04-check-indices-vhggs.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:47 +0000 UTC Normal Pod 04-check-indices-vhggs.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:47 +0000 UTC Normal Pod 04-check-indices-vhggs.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:49 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:57 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-85tkt job-controller logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:58 +0000 UTC Normal Pod 03-report-span-85tkt Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/03-report-span-85tkt to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:58 +0000 UTC Normal Pod 03-report-span-85tkt AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:58 +0000 UTC Normal Pod 03-report-span-85tkt.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:58 +0000 UTC Normal Pod 03-report-span-85tkt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:41:58 +0000 UTC Normal Pod 03-report-span-85tkt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:43:21 | 
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280622-g8kc7 Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-es-lookback-28280622-g8kc7 to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280622-g8kc7 AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280622-g8kc7.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280622-g8kc7.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280622-g8kc7.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28280622 SuccessfulCreate Created pod: my-jaeger-es-lookback-28280622-g8kc7 job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28280622 cronjob-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280622-ss4sn Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-es-rollover-28280622-ss4sn to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280622-ss4sn AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280622-ss4sn.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280622-ss4sn.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280622-ss4sn.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28280622 SuccessfulCreate Created pod: my-jaeger-es-rollover-28280622-ss4sn job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28280622 cronjob-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28280622 Completed Job completed job-controller
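[Editor's note] The numeric suffix in my-jaeger-es-lookback-28280622 is not random: the CronJob controller names each Job after its scheduled time in minutes since the Unix epoch, which is why the lookback and rollover Jobs created in the same minute share the suffix. It checks out against the event timestamps:

    # CronJob-spawned Jobs are named <cronjob>-<minutes since epoch>:
    date -u -d "@$((28280622 * 60))"    # Mon Oct  9 07:42:00 UTC 2023
    date -u -d "@$((28280623 * 60))"    # Mon Oct  9 07:43:00 UTC 2023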
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28280622, status: Complete cronjob-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28280622 Completed Job completed job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28280622, status: Complete cronjob-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:06 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-74dd5d98f7-jq8ss horizontal-pod-autoscaler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:42:21 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280623-76ghc Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-es-lookback-28280623-76ghc to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280623-76ghc AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280623-76ghc.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280623-76ghc.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28280623-76ghc.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28280623 SuccessfulCreate Created pod: my-jaeger-es-lookback-28280623-76ghc job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28280623 cronjob-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280623-tpnd9 Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/my-jaeger-es-rollover-28280623-tpnd9 to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280623-tpnd9 AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280623-tpnd9.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:a5d7cb53d884501cb02b8204fc496678dbc58245900a61a0611f6e04525557fd" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280623-tpnd9.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28280623-tpnd9.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28280623 SuccessfulCreate Created pod: my-jaeger-es-rollover-28280623-tpnd9 job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28280623 cronjob-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28280623 Completed Job completed job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28280623, status: Complete cronjob-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28280623 Completed Job completed job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28280623, status: Complete cronjob-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:13 +0000 UTC Normal Pod 05-check-indices-vjrqc Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/05-check-indices-vjrqc to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:13 +0000 UTC Normal Pod 05-check-indices-vjrqc AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:13 +0000 UTC Normal Pod 05-check-indices-vjrqc.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:13 +0000 UTC Normal Pod 05-check-indices-vjrqc.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:13 +0000 UTC Normal Pod 05-check-indices-vjrqc.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:13 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-vjrqc job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:16 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:17 +0000 UTC Normal Pod 06-check-indices-jvm9k Binding Scheduled Successfully assigned kuttl-test-deciding-kitten/06-check-indices-jvm9k to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:17 +0000 UTC Normal Pod 06-check-indices-jvm9k AddedInterface Add eth0 [10.129.2.45/23] from ovn-kubernetes
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:17 +0000 UTC Normal Pod 06-check-indices-jvm9k.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:17 +0000 UTC Normal Pod 06-check-indices-jvm9k.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:17 +0000 UTC Normal Pod 06-check-indices-jvm9k.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:17 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-jvm9k job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | 2023-10-09 07:43:20 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller
logger.go:42: 07:43:21 | es-rollover-autoprov | Deleting namespace: kuttl-test-deciding-kitten
=== CONT  kuttl/harness/es-multiinstance
logger.go:42: 07:43:28 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:43:28 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:43:28 | es-multiinstance | Creating namespace: kuttl-test-model-donkey
logger.go:42: 07:43:28 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace
logger.go:42: 07:43:28 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true]
logger.go:42: 07:43:28 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace
logger.go:42: 07:43:28 | es-multiinstance/1-install | starting test step 1-install
logger.go:42: 07:43:28 | es-multiinstance/1-install | Jaeger:kuttl-test-model-donkey/instance-1 created
logger.go:42: 07:44:03 | es-multiinstance/1-install | test step completed 1-install
logger.go:42: 07:44:03 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace
logger.go:42: 07:44:03 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test]
logger.go:42: 07:44:03 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created
logger.go:42: 07:44:03 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace
logger.go:42: 07:44:03 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance
logger.go:42: 07:44:03 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test]
logger.go:42: 07:44:04 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created
logger.go:42: 07:44:04 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000]
logger.go:42: 07:44:57 | es-multiinstance/3-create-second-instance | assert is valid
logger.go:42: 07:44:57 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance
logger.go:42: 07:44:57 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets
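[Editor's note] The "Ignoring ..." lines above show kuttl's step discovery: only files matching the step regexp are executed as numbered test steps, which is why the template and README are skipped and 03-create-second-instance.yaml.template is applied manually in step 3 instead. The pattern can be checked directly (grep -P, since it relies on \d and non-capturing groups):

    re='^(\d+)-(?:[^\.]+)(?:\.yaml)?$'
    for f in 01-install.yaml 03-create-second-instance.yaml.template README.md; do
      echo "$f" | grep -Pq "$re" && echo "step:    $f" || echo "ignored: $f"
    done
    # step:    01-install.yaml
    # ignored: 03-create-second-instance.yaml.template
    # ignored: README.md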
logger.go:42: 07:44:57 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1]
logger.go:42: 07:44:57 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2]
logger.go:42: 07:44:57 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0]
logger.go:42: 07:44:57 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets
logger.go:42: 07:44:57 | es-multiinstance/5-delete | starting test step 5-delete
logger.go:42: 07:44:57 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false]
logger.go:42: 07:44:57 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted
logger.go:42: 07:44:57 | es-multiinstance/5-delete | test step completed 5-delete
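[Editor's note] One subtlety in the 4-check-secrets step above: cmp --silent secret1 secret2 || exit 0 exits 0 whether or not the two certificates match, because cmp's nonzero exit is swallowed by || exit 0 and a zero exit passes on its own. Presumably the intent is only to record that both namespaces expose a logging-es.crt; if the step were meant to require the files to differ, it would need the inverted form:

    # Hypothetical stricter variant: fail the step if the certificates
    # extracted from the two namespaces are byte-identical.
    if cmp --silent secret1 secret2; then
      echo "secrets unexpectedly identical" >&2
      exit 1
    fi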
07:44:57 | es-multiinstance | 2023-10-09 07:43:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmodeldonkeyinstance1-1-55977bfbwnsnp.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:43:45 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmodeldonkeyinstance1-1-55977bfbwnsnp.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:43:50 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmodeldonkeyinstance1-1-55977bfbwnsnp.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:00 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-cgqj4 Binding Scheduled Successfully assigned kuttl-test-model-donkey/instance-1-collector-5dd4d98b8-cgqj4 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:00 +0000 UTC Normal ReplicaSet.apps instance-1-collector-5dd4d98b8 SuccessfulCreate Created pod: instance-1-collector-5dd4d98b8-cgqj4 replicaset-controller logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:00 +0000 UTC Normal Deployment.apps instance-1-collector ScalingReplicaSet Scaled up replica set instance-1-collector-5dd4d98b8 to 1 deployment-controller logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:00 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc Binding Scheduled Successfully assigned kuttl-test-model-donkey/instance-1-query-757f86bb59-xcsnc to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:00 +0000 UTC Normal ReplicaSet.apps instance-1-query-757f86bb59 SuccessfulCreate Created pod: instance-1-query-757f86bb59-xcsnc replicaset-controller logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:00 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-757f86bb59 to 1 deployment-controller logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-cgqj4 AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-cgqj4.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-cgqj4.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-collector-5dd4d98b8-cgqj4.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc AddedInterface Add eth0 [10.131.0.29/23] from ovn-kubernetes logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet 
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:01 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:04 +0000 UTC Normal ReplicaSet.apps instance-1-query-757f86bb59 SuccessfulDelete Deleted pod: instance-1-query-757f86bb59-xcsnc replicaset-controller
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:04 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled down replica set instance-1-query-757f86bb59 to 0 from 1 deployment-controller
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:05 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb Binding Scheduled Successfully assigned kuttl-test-model-donkey/instance-1-query-757f86bb59-56djb to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:05 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb AddedInterface Add eth0 [10.129.2.47/23] from ovn-kubernetes
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:05 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:05 +0000 UTC Normal ReplicaSet.apps instance-1-query-757f86bb59 SuccessfulCreate Created pod: instance-1-query-757f86bb59-56djb replicaset-controller
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:05 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-757f86bb59 to 1 from 0 deployment-controller
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:06 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:06 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:06 +0000 UTC Normal Pod instance-1-query-757f86bb59-xcsnc.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:08 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" in 3.185876509s (3.185892001s including waiting) kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:09 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:09 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:09 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:09 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:09 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:09 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:12 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" in 2.74801765s (2.74803098s including waiting) kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:12 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:12 +0000 UTC Normal Pod instance-1-query-757f86bb59-56djb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:34 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod instance-1-collector-5dd4d98b8-cgqj4 horizontal-pod-autoscaler
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 07:44:57 | es-multiinstance | 2023-10-09 07:44:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 07:44:57 | es-multiinstance | Deleting namespace: kuttl-test-model-donkey
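The FailedGetResourceMetric / FailedComputeMetricsReplicas warnings above are expected at this point: the HPA computes utilization as a percentage of a container's resource requests, and the jaeger-collector container has no memory request yet ("missing request for memory in container jaeger-collector"). A minimal sketch of how such requests could be set, assuming the Jaeger CR accepts standard Kubernetes resource requirements under spec.collector.resources (values are illustrative; this is not what the test itself does):

    # Hypothetical patch; instance and namespace names are taken from the test above.
    kubectl patch jaeger instance-1 -n kuttl-test-model-donkey --type=merge \
      -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'
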
=== CONT kuttl/harness/es-increasing-replicas
logger.go:42: 07:45:04 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:45:04 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:45:04 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:45:04 | es-increasing-replicas | Creating namespace: kuttl-test-brief-tahr
logger.go:42: 07:45:04 | es-increasing-replicas/1-install | starting test step 1-install
logger.go:42: 07:45:04 | es-increasing-replicas/1-install | Jaeger:kuttl-test-brief-tahr/simple-prod created
logger.go:42: 07:45:41 | es-increasing-replicas/1-install | test step completed 1-install
logger.go:42: 07:45:41 | es-increasing-replicas/2-install | starting test step 2-install
logger.go:42: 07:45:41 | es-increasing-replicas/2-install | Jaeger:kuttl-test-brief-tahr/simple-prod updated
logger.go:42: 07:45:52 | es-increasing-replicas/2-install | test step completed 2-install
logger.go:42: 07:45:52 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test
logger.go:42: 07:45:52 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 07:45:53 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:45:59 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:46:00 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:46:00 | es-increasing-replicas/3-smoke-test | job.batch/report-span created
logger.go:42: 07:46:00 | es-increasing-replicas/3-smoke-test | job.batch/check-span created
logger.go:42: 07:46:12 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test
logger.go:42: 07:46:12 | es-increasing-replicas/4-install | starting test step 4-install
logger.go:42: 07:46:12 | es-increasing-replicas/4-install | Jaeger:kuttl-test-brief-tahr/simple-prod updated
logger.go:42: 07:46:12 | es-increasing-replicas/4-install | test step completed 4-install
logger.go:42: 07:46:12 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes
logger.go:42: 07:46:12 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE]
logger.go:42: 07:46:12 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 07:46:12 | es-increasing-replicas/5-check-es-nodes | false
logger.go:42: 07:46:12 | es-increasing-replicas/5-check-es-nodes | Error: no matches found
logger.go:42: 07:46:17 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 07:46:17 | es-increasing-replicas/5-check-es-nodes | true
logger.go:42: 07:46:17 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes
logger.go:42: 07:46:17 | es-increasing-replicas | es-increasing-replicas events from ns kuttl-test-brief-tahr:
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:10 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fc7 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv Binding Scheduled Successfully assigned kuttl-test-brief-tahr/elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:10 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fc7 to 1 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:21 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:26 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-1-6987c75fctn8xv.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:37 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-mfxpn Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-collector-5499b86c46-mfxpn to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:37 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulCreate Created pod: simple-prod-collector-5499b86c46-mfxpn replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:37 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5499b86c46 to 1 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:37 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-query-54c8fd57bc-tvcwd to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:37 +0000 UTC Normal ReplicaSet.apps simple-prod-query-54c8fd57bc SuccessfulCreate Created pod: simple-prod-query-54c8fd57bc-tvcwd replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:37 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-54c8fd57bc to 1 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-mfxpn AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-mfxpn.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-mfxpn.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-mfxpn.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:38 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-w6ldp Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-collector-5499b86c46-w6ldp to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-w6ldp AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-w6ldp.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulCreate Created pod: simple-prod-collector-5499b86c46-w6ldp replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5499b86c46 to 2 from 1 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5 Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-query-54c8fd57bc-hg5r5 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5 AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal ReplicaSet.apps simple-prod-query-54c8fd57bc SuccessfulCreate Created pod: simple-prod-query-54c8fd57bc-hg5r5 replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:42 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-54c8fd57bc to 2 from 1 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:45 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-w6ldp.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" in 2.654884751s (2.654896751s including waiting) kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:45 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-w6ldp.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:45 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-w6ldp.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:46 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" in 3.445352803s (3.445366683s including waiting) kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:46 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:46 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:46 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{oauth-proxy} Pulling Pulling image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:47 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{oauth-proxy} Pulled Successfully pulled image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" in 1.482447198s (1.482460909s including waiting) kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:48 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:48 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:48 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:50 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" in 2.765974764s (2.765987714s including waiting) kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:50 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:50 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:54 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:54 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:54 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-tvcwd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:54 +0000 UTC Normal ReplicaSet.apps simple-prod-query-54c8fd57bc SuccessfulDelete Deleted pod: simple-prod-query-54c8fd57bc-hg5r5 replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:54 +0000 UTC Normal ReplicaSet.apps simple-prod-query-54c8fd57bc SuccessfulDelete Deleted pod: simple-prod-query-54c8fd57bc-tvcwd replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:54 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-54c8fd57bc to 0 from 2 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:55 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:55 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:55 +0000 UTC Normal Pod simple-prod-query-54c8fd57bc-hg5r5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-query-5cf9c7c46f-5625d to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-query-5cf9c7c46f-kdtmt to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt AddedInterface Add eth0 [10.128.2.30/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5cf9c7c46f SuccessfulCreate Created pod: simple-prod-query-5cf9c7c46f-5625d replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5cf9c7c46f SuccessfulCreate Created pod: simple-prod-query-5cf9c7c46f-kdtmt replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:56 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-5cf9c7c46f to 2 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:57 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:57 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:57 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:45:57 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:00 +0000 UTC Normal Pod check-span-6r6qm Binding Scheduled Successfully assigned kuttl-test-brief-tahr/check-span-6r6qm to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:00 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-6r6qm job-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:00 +0000 UTC Normal Pod report-span-mrnv7 Binding Scheduled Successfully assigned kuttl-test-brief-tahr/report-span-mrnv7 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:00 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-mrnv7 job-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:01 +0000 UTC Normal Pod check-span-6r6qm AddedInterface Add eth0 [10.129.2.53/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:01 +0000 UTC Normal Pod check-span-6r6qm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:01 +0000 UTC Normal Pod check-span-6r6qm.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:01 +0000 UTC Normal Pod check-span-6r6qm.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:01 +0000 UTC Normal Pod report-span-mrnv7 AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:01 +0000 UTC Normal Pod report-span-mrnv7.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:02 +0000 UTC Normal Pod report-span-mrnv7.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" in 1.782060338s (1.782090159s including waiting) kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:03 +0000 UTC Normal Pod report-span-mrnv7.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:03 +0000 UTC Normal Pod report-span-mrnv7.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:12 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-mfxpn.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-w6ldp.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulDelete Deleted pod: simple-prod-collector-5499b86c46-mfxpn replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulDelete Deleted pod: simple-prod-collector-5499b86c46-w6ldp replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-5499b86c46 to 0 from 2 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Warning Endpoints simple-prod-collector FailedToUpdateEndpoint Failed to update endpoint kuttl-test-brief-tahr/simple-prod-collector: Operation cannot be fulfilled on endpoints "simple-prod-collector": the object has been modified; please apply your changes to the latest version and try again endpoint-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-5625d.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Pod simple-prod-query-5cf9c7c46f-kdtmt.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5cf9c7c46f SuccessfulDelete Deleted pod: simple-prod-query-5cf9c7c46f-5625d replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5cf9c7c46f SuccessfulDelete Deleted pod: simple-prod-query-5cf9c7c46f-kdtmt replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:13 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-5cf9c7c46f to 0 from 2 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2 replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2 Binding Scheduled Successfully assigned kuttl-test-brief-tahr/elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9 to 1 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-5rk85 Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-collector-55656dcb65-5rk85 to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-tvdl7 Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-collector-55656dcb65-tvdl7 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-55656dcb65 SuccessfulCreate Created pod: simple-prod-collector-55656dcb65-tvdl7 replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-55656dcb65 SuccessfulCreate Created pod: simple-prod-collector-55656dcb65-5rk85 replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-55656dcb65 to 2 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6 Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-query-5b8d4bb5cd-gtvz6 to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6 AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5 Binding Scheduled Successfully assigned kuttl-test-brief-tahr/simple-prod-query-5b8d4bb5cd-hv6x5 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5b8d4bb5cd SuccessfulCreate Created pod: simple-prod-query-5b8d4bb5cd-hv6x5 replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5b8d4bb5cd SuccessfulCreate Created pod: simple-prod-query-5b8d4bb5cd-gtvz6 replicaset-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:14 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-5b8d4bb5cd to 2 deployment-controller
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2 AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbrieftahrsimpleprod-2-9654ccbd9gr2m2.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-5rk85 AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-5rk85.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-5rk85.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-5rk85.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-tvdl7 AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-tvdl7.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-tvdl7.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-collector-55656dcb65-tvdl7.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-gtvz6.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5 AddedInterface Add eth0 [10.128.2.32/23] from ovn-kubernetes
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | 2023-10-09 07:46:15 +0000 UTC Normal Pod simple-prod-query-5b8d4bb5cd-hv6x5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:46:17 | es-increasing-replicas | Deleting namespace: kuttl-test-brief-tahr
=== CONT kuttl/harness/es-from-aio-to-production
logger.go:42: 07:46:51 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:46:51 | es-from-aio-to-production | Creating namespace: kuttl-test-mutual-bull
logger.go:42: 07:46:51 | es-from-aio-to-production/0-install | starting test step 0-install
logger.go:42: 07:46:51 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-mutual-bull/my-jaeger created
logger.go:42: 07:46:59 | es-from-aio-to-production/0-install | test step completed 0-install
logger.go:42: 07:46:59 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:46:59 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:47:01 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:47:07 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:47:08 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:47:08 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created
logger.go:42: 07:47:08 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created
logger.go:42: 07:47:14 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:47:14 | es-from-aio-to-production/3-install | starting test step 3-install
logger.go:42: 07:47:14 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-mutual-bull/my-jaeger updated
logger.go:42: 07:47:48 | es-from-aio-to-production/3-install | test step completed 3-install
logger.go:42: 07:47:48 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test
logger.go:42: 07:47:48 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:47:55 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:47:56 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:47:56 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged
logger.go:42: 07:47:56 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged
logger.go:42: 07:47:56 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test
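Every smoke test in this run follows the same two-phase pattern visible in the commands above: gomplate renders report-span and check-span Jobs from smoke-test.yaml.template with the collector and query endpoints injected as environment variables, kubectl applies the rendered file, and the harness then waits for the Jobs to complete. Reproduced as a standalone sketch (the final kubectl wait is an assumption for manual use; the harness performs its own assertion on Job status):

    JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 \
    MOUNT_SECRET=e2e-test \
    ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b \
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n $NAMESPACE
    kubectl wait --for=condition=complete job/report-span job/check-span -n $NAMESPACE --timeout=300s
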
logger.go:42: 07:47:56 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-mutual-bull:
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:55 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6 Binding Scheduled Successfully assigned kuttl-test-mutual-bull/my-jaeger-7c7466b5d5-2dsh6 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:55 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6 AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:55 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:55 +0000 UTC Normal ReplicaSet.apps my-jaeger-7c7466b5d5 SuccessfulCreate Created pod: my-jaeger-7c7466b5d5-2dsh6 replicaset-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:55 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7c7466b5d5 to 1 deployment-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:58 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" in 2.518903171s (2.518912732s including waiting) kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:58 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:58 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:58 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:58 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:46:58 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:03 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:03 +0000 UTC Normal Pod my-jaeger-7c7466b5d5-2dsh6.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:03 +0000 UTC Normal ReplicaSet.apps my-jaeger-7c7466b5d5 SuccessfulDelete Deleted pod: my-jaeger-7c7466b5d5-2dsh6 replicaset-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:03 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-7c7466b5d5 to 0 from 1 deployment-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:04 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c Binding Scheduled Successfully assigned kuttl-test-mutual-bull/my-jaeger-5876985956-xv68c to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:04 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c AddedInterface Add eth0 [10.128.2.36/23] from ovn-kubernetes
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:04 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:04 +0000 UTC Normal ReplicaSet.apps my-jaeger-5876985956 SuccessfulCreate Created pod: my-jaeger-5876985956-xv68c replicaset-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:04 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-5876985956 to 1 deployment-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:05 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:05 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:05 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:05 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:05 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:08 +0000 UTC Normal Pod check-span-vvwwr Binding Scheduled Successfully assigned kuttl-test-mutual-bull/check-span-vvwwr to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:08 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-vvwwr job-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:08 +0000 UTC Normal Pod report-span-twpnk Binding Scheduled Successfully assigned kuttl-test-mutual-bull/report-span-twpnk to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:08 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-twpnk job-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:09 +0000 UTC Normal Pod check-span-vvwwr AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:09 +0000 UTC Normal Pod check-span-vvwwr.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:09 +0000 UTC Normal Pod report-span-twpnk AddedInterface Add eth0 [10.129.2.56/23] from ovn-kubernetes
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:09 +0000 UTC Normal Pod report-span-twpnk.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:09 +0000 UTC Normal Pod report-span-twpnk.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:09 +0000 UTC Normal Pod report-span-twpnk.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:10 +0000 UTC Normal Pod check-span-vvwwr.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" in 1.39843702s (1.398449801s including waiting) kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:10 +0000 UTC Normal Pod check-span-vvwwr.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:10 +0000 UTC Normal Pod check-span-vvwwr.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:13 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf Binding Scheduled Successfully assigned kuttl-test-mutual-bull/elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:17 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf replicaset-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:17 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmutualbullmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f to 1 deployment-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:18 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:19 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:34 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmutualbullmyjaeger-1-db968776f-rfdvf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:42 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:44 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:44 +0000 UTC Normal Pod my-jaeger-5876985956-xv68c.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:44 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-896r6 Binding Scheduled Successfully assigned kuttl-test-mutual-bull/my-jaeger-collector-558ccfc8dd-896r6 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:44 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-896r6 replicaset-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:44 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:44 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd Binding Scheduled Successfully assigned kuttl-test-mutual-bull/my-jaeger-query-59c86d77fc-2trwd to ip-10-0-98-173.ec2.internal
default-scheduler logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:44 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-59c86d77fc SuccessfulCreate Created pod: my-jaeger-query-59c86d77fc-2trwd replicaset-controller logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:44 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-59c86d77fc to 1 deployment-controller logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-896r6 AddedInterface Add eth0 [10.128.2.37/23] from ovn-kubernetes logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-896r6.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-896r6.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-896r6.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod 
my-jaeger-query-59c86d77fc-2trwd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | 2023-10-09 07:47:45 +0000 UTC Normal Pod my-jaeger-query-59c86d77fc-2trwd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:47:56 | es-from-aio-to-production | Deleting namespace: kuttl-test-mutual-bull
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (893.86s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.24s)
        --- PASS: kuttl/harness/es-simple-prod (5.94s)
        --- PASS: kuttl/harness/es-streaming-autoprovisioned (228.05s)
        --- PASS: kuttl/harness/es-index-cleaner-autoprov (137.54s)
        --- PASS: kuttl/harness/es-rollover-autoprov (241.05s)
        --- PASS: kuttl/harness/es-multiinstance (96.48s)
        --- PASS: kuttl/harness/es-increasing-replicas (106.76s)
        --- PASS: kuttl/harness/es-from-aio-to-production (71.75s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml
time="2023-10-09T07:48:03Z" level=debug msg="Setting a new name for the test suites"
time="2023-10-09T07:48:03Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-10-09T07:48:03Z" level=debug msg="normalizing test case names"
time="2023-10-09T07:48:03Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts"
time="2023-10-09T07:48:03Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod"
time="2023-10-09T07:48:03Z" level=debug msg="elasticsearch/es-streaming-autoprovisioned -> elasticsearch_es_streaming_autoprovisioned"
time="2023-10-09T07:48:03Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov"
time="2023-10-09T07:48:03Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov"
time="2023-10-09T07:48:03Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance"
time="2023-10-09T07:48:03Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas"
time="2023-10-09T07:48:03Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+--------------------------------------------+--------+
|                    NAME                    | RESULT |
+--------------------------------------------+--------+
| elasticsearch_artifacts                    | passed |
| elasticsearch_es_simple_prod               | passed |
| elasticsearch_es_streaming_autoprovisioned | passed |
| elasticsearch_es_index_cleaner_autoprov    | passed |
| elasticsearch_es_rollover_autoprov         | passed |
| elasticsearch_es_multiinstance             | passed |
| elasticsearch_es_increasing_replicas       | passed |
| elasticsearch_es_from_aio_to_production    | passed |
+--------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ '[' 0 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=examples
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory
'/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/examples.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-examples make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \ ./tests/e2e/examples/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 21m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 21m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
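
A few lines up, the render script derives KAFKA_USE_CUSTOM_PODSET from a version_le comparison of KAFKA_VERSION (0.32.0 here) against 0.25.0, using sort -V to pick the smaller of the two versions. A minimal sketch of that helper as reconstructed from the trace; only the pipeline and the final test are visible in the log, so the surrounding if/else wiring is an assumption:

    # version_le A B: succeeds when version A <= version B.
    # sort -V orders the two versions; if the smaller one is A, then A <= B.
    version_le() {
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }

    # Strimzi releases newer than 0.25.0 (0.32.0 in this run) use
    # StrimziPodSet resources instead of the legacy StatefulSets.
    if version_le "$KAFKA_VERSION" "0.25.0"; then
        KAFKA_USE_CUSTOM_PODSET=false
    else
        KAFKA_USE_CUSTOM_PODSET=true
    fi

With KAFKA_VERSION=0.32.0 the minimum of the pair is 0.25.0, the test fails, and the trace takes the KAFKA_USE_CUSTOM_PODSET=true branch, exactly as logged above.
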
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-as-daemonset + cd examples-agent-as-daemonset + example_name=agent-as-daemonset + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-as-daemonset 01 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-as-daemonset.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-as-daemonset 02 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset + '[' examples-agent-as-daemonset '!=' _build ']' + cd .. 
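
The repeated yq probes in the trace above implement the strategy detection used to pick an assert template: an explicit production or streaming spec.strategy wins, then a DaemonSet agent strategy, and anything else falls back to allInOne. A sketch of get_jaeger_strategy reconstructed from the trace; handling of malformed files is omitted:

    get_jaeger_strategy() {
        local deployment_file=$1
        local strategy

        # An explicit production or streaming strategy wins outright.
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
        if [ "$strategy" = "production" ] || [ "$strategy" = "streaming" ]; then
            echo "$strategy"
            return 0
        fi

        # Otherwise a DaemonSet agent selects the DaemonSet handling.
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
        if [ "$strategy" != "null" ]; then
            echo "$strategy"
            return 0
        fi

        # Everything else is treated as allInOne.
        echo "allInOne"
        return 0
    }

This is why agent-as-daemonset above resolves to DaemonSet while plain examples such as simplest resolve to allInOne even though neither sets spec.strategy.
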
+ mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. + mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + '[' false = true ']' + start_test examples-auto-provision-kafka + '[' 1 -ne 1 ']' + test_name=examples-auto-provision-kafka + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-auto-provision-kafka' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-auto-provision-kafka\e[0m' Rendering files for test examples-auto-provision-kafka + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. 
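
Every smoke-test step in this suite is rendered the same way: pick a protocol, port, and template depending on whether the query endpoint is secured, export the endpoints for gomplate, render the test and assert steps, and clean up. A sketch matching the traced behavior; only the secured (OpenShift) branch appears in this log, so the plain-HTTP defaults (http://, :16686, and the non-OpenShift template path) are assumptions:

    render_smoke_test() {
        local jaeger=$1 is_secured=$2 test_step=$3
        local protocol=http:// query_port=:16686
        local template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template

        # On OpenShift the query service sits behind the oauth-proxy on 443.
        if [ "$is_secured" = true ]; then
            protocol=https://
            query_port=:443
            template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
        fi

        export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
        export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
        export JAEGER_NAME=$jaeger

        # The rendered step runs the report-span job against the collector
        # and the assert polls for the span via check-span, as in the
        # elasticsearch suite events earlier in this log.
        /tmp/jaeger-tests/bin/gomplate -f "$template" -o ./${test_step}-smoke-test.yaml
        /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./${test_step}-assert.yaml

        unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }
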
+ mkdir -p examples-auto-provision-kafka + cd examples-auto-provision-kafka + example_name=auto-provision-kafka + render_install_kafka_operator 01 + '[' 1 -ne 1 ']' + test_step=01 + '[' true '!=' true ']' + render_install_example auto-provision-kafka 02 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/auto-provision-kafka.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + JAEGER_NAME=auto-provision-kafka + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=streaming ++ '[' streaming = production ']' ++ '[' streaming = streaming ']' ++ echo streaming ++ return 0 + jaeger_strategy=streaming + '[' streaming = DaemonSet ']' + '[' streaming = allInOne ']' + '[' streaming = production ']' + '[' streaming = streaming ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./02-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./02-install.yaml + mv ./02-assert.yaml ./05-assert.yaml + render_assert_kafka true auto-provision-kafka 02 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provision-kafka + test_step=02 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./02-assert.yaml ++ expr 02 + 1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./3-assert.yaml ++ expr 02 + 2 + CLUSTER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./04-assert.yaml + 
render_smoke_test_example auto-provision-kafka 06 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=06 + deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + jaeger_name=auto-provision-kafka + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test auto-provision-kafka true 06 + '[' 3 -ne 3 ']' + jaeger=auto-provision-kafka + is_secured=true + test_step=06 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + export JAEGER_NAME=auto-provision-kafka + JAEGER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./06-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-auto-provision-kafka + '[' examples-auto-provision-kafka '!=' _build ']' + cd .. 
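
The Kafka asserts rendered just above begin by checking whether the operator runs with minimal Kafka provisioning: the script inspects the jaeger-operator pod in a few candidate namespaces for a KAFKA-PROVISIONING-MINIMAL env var and sizes the expected ZooKeeper/Kafka clusters accordingly. A sketch reconstructed from the trace; the non-minimal replica count of 3 is an assumption, since this run only exercises the minimal path (replicas=1):

    is_kafka_minimal_enabled() {
        local namespaces=(observability openshift-operators openshift-distributed-tracing)
        local enabled
        for i in "${namespaces[@]}"; do
            # Read KAFKA-PROVISIONING-MINIMAL off the operator pod, if present.
            enabled=$(kubectl get pods -n "$i" -l name=jaeger-operator -o yaml \
                | /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value')
            if [ "$enabled" == true ]; then
                return 0
            fi
        done
        return 1
    }

    # Used above to pick the replica count for the cluster asserts;
    # the 3-replica fallback is assumed, not shown in this run.
    if is_kafka_minimal_enabled; then replicas=1; else replicas=3; fi

In this log the variable is found in openshift-distributed-tracing, so the assert templates are rendered with REPLICAS=1.
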
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
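
Each "Rendering files for test ..." banner in this suite comes from a small start_test helper: print the banner, hop back to the _build directory if the previous test left the shell inside its own folder, then create and enter the new test's directory. A sketch reconstructed from the trace:

    info() {
        # Blue bold banner, matching the escape codes in the trace.
        echo -e "\e[1;34m$1\e[0m"
    }

    start_test() {
        local test_name=$1
        echo "==========================================================================="
        info "Rendering files for test $test_name"
        echo "==========================================================================="
        # If we are still inside the previous test's folder, return to _build.
        if [ "$(basename "$(pwd)")" != "_build" ]; then
            cd ..
        fi
        mkdir -p "$test_name"
        cd "$test_name"
    }

This explains the basename/'!=' _build check and the cd .. that precede every mkdir -p in the trace.
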
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
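
For the production and streaming examples above, the render replaces the example's external storage settings with an operator-provisioned single-node Elasticsearch capped at 2Gi of memory, so the production-jaeger-assert template matches what the operator actually deploys. The two [[ true = true ]] guards in the trace are pre-expanded flags; IS_OPENSHIFT is exported earlier in this log, while naming the second one SKIP_ES_EXTERNAL is an assumption based on the suite's environment:

    # Sketch: swap the example's storage config for an auto-provisioned
    # single-node Elasticsearch. IS_OPENSHIFT comes from the clusterversion
    # probe earlier in this log; treating the second guard as
    # SKIP_ES_EXTERNAL is an assumption.
    if [[ $IS_OPENSHIFT = true ]] && [[ $SKIP_ES_EXTERNAL = true ]]; then
        /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' "$install_file"
        /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' "$install_file"
    fi
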
+ mkdir -p examples-simple-prod-with-volumes + cd examples-simple-prod-with-volumes + example_name=simple-prod-with-volumes + render_install_example simple-prod-with-volumes 01 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod-with-volumes 02 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml + start_test examples-simplest + '[' 1 -ne 1 ']' + test_name=examples-simplest + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simplest' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simplest\e[0m' Rendering files for test examples-simplest + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes + '[' examples-simple-prod-with-volumes '!=' _build ']' + cd .. + mkdir -p examples-simplest + cd examples-simplest + example_name=simplest + render_install_example simplest 00 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 01 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger + '[' 1 -ne 1 ']' + test_name=examples-with-badger + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m' Rendering files for test examples-with-badger + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest + '[' examples-simplest '!=' _build ']' + cd .. + mkdir -p examples-with-badger + cd examples-with-badger + example_name=with-badger + render_install_example with-badger 00 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + JAEGER_NAME=with-badger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger 01 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + jaeger_name=with-badger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + export JAEGER_NAME=with-badger + JAEGER_NAME=with-badger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger-and-volume + '[' 1 -ne 1 ']' + test_name=examples-with-badger-and-volume + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger-and-volume' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m' Rendering files for test examples-with-badger-and-volume + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger + '[' examples-with-badger '!=' _build ']' + cd .. + mkdir -p examples-with-badger-and-volume + cd examples-with-badger-and-volume + example_name=with-badger-and-volume + render_install_example with-badger-and-volume 00 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + JAEGER_NAME=with-badger-and-volume + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger-and-volume 01 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + jaeger_name=with-badger-and-volume + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger-and-volume true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger-and-volume + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + export JAEGER_NAME=with-badger-and-volume + JAEGER_NAME=with-badger-and-volume + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-cassandra + '[' 1 -ne 1 ']' + test_name=examples-with-cassandra + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-cassandra' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m' Rendering files for test examples-with-cassandra + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume + '[' examples-with-badger-and-volume '!=' _build ']' + cd .. 
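The strategy probe repeated before every assert render above reads cleanly out of the trace: take .spec.strategy from the Jaeger document, fall back to .spec.agent.strategy, and default to allInOne when both come back null. A minimal reconstruction, assuming yq v4 on PATH (the body is inferred from the trace, not copied from the repo source, and the DaemonSet branch is my assumption since this run never exercises it):

get_jaeger_strategy() {
    # Expects the rendered install file as the only argument.
    [ $# -ne 1 ] && return 1
    local deployment_file=$1

    local strategy
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
    if [ "$strategy" = "production" ] || [ "$strategy" = "streaming" ]; then
        echo "$strategy"
        return 0
    fi

    # Anything else (allInOne or null): check whether the agent is a DaemonSet.
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
    if [ "$strategy" = "null" ]; then
        echo "allInOne"
    else
        echo "DaemonSet"    # assumed: the only non-null agent strategy in these tests
    fi
}

The caller then picks the matching assert template, production-jaeger-assert.yaml.template versus allinone-jaeger-assert.yaml.template, as the gomplate invocations above show for the two strategies exercised in this run.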
+ mkdir -p examples-with-cassandra + cd examples-with-cassandra + example_name=with-cassandra + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-cassandra 01 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + JAEGER_NAME=with-cassandra + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-cassandra 02 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + jaeger_name=with-cassandra + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-cassandra true 02 + '[' 3 -ne 3 ']' + jaeger=with-cassandra + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + export JAEGER_NAME=with-cassandra + JAEGER_NAME=with-cassandra + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-sampling + '[' 1 -ne 1 ']' + test_name=examples-with-sampling + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-sampling' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m' Rendering files for test examples-with-sampling + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra + '[' examples-with-cassandra '!=' _build ']' + cd .. + mkdir -p examples-with-sampling + cd examples-with-sampling + export example_name=with-sampling + example_name=with-sampling + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-sampling 01 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + JAEGER_NAME=with-sampling + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-sampling 02 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + jaeger_name=with-sampling + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-sampling true 02 + '[' 3 -ne 3 ']' + jaeger=with-sampling + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + export JAEGER_NAME=with-sampling + JAEGER_NAME=with-sampling + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + start_test examples-openshift-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-openshift-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-agent-as-daemonset\e[0m' Rendering files for test examples-openshift-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling + '[' examples-with-sampling '!=' _build ']' + cd .. 
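Each secured smoke-test render above is the same five moves: pick https:// and :443 plus the OpenShift template, export the query and collector endpoints for gomplate, render the test step and its assert file, and unset everything so the next test starts clean. Condensed into one sketch (reconstructed from the trace with the logged template paths; the gomplate binary is assumed on PATH, and the unsecured defaults are assumptions since only the secured path runs here):

render_smoke_test() {
    # Args: Jaeger instance name, "true" if the route is secured, kuttl step number.
    [ $# -ne 3 ] && return 1
    local jaeger=$1 is_secured=$2 test_step=$3

    local protocol="http://" query_port=":16686"   # assumed unsecured defaults
    local template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
    if [ "$is_secured" = "true" ]; then
        protocol="https://"
        query_port=":443"
        template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
    fi

    export JAEGER_QUERY_ENDPOINT="${protocol}${jaeger}-query${query_port}"
    export JAEGER_COLLECTOR_ENDPOINT="http://${jaeger}-collector-headless:14268"
    export JAEGER_NAME="$jaeger"

    gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
    gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template \
        -o "./${test_step}-assert.yaml"

    unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
}

The export/unset dance matters because gomplate reads these values from the environment; a leaked JAEGER_NAME would silently template the next test against the wrong instance.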
+ mkdir -p examples-openshift-agent-as-daemonset + cd examples-openshift-agent-as-daemonset + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml + render_install_vertx 03 + '[' 1 -ne 1 ']' + test_step=03 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./03-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].env=[{"name": "JAEGER_AGENT_HOST", "valueFrom": {"fieldRef": {"apiVersion": "v1", "fieldPath": "status.hostIP"}}}]' ./03-install.yaml + render_find_service agent-as-daemonset production order 00 04 + '[' 5 -ne 5 ']' + jaeger=agent-as-daemonset + deployment_strategy=production + service_name=order + job_number=00 + test_step=04 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' production '!=' allInOne ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template -o ./04-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + start_test examples-openshift-with-htpasswd + '[' 1 -ne 1 ']' + test_name=examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-with-htpasswd' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m' Rendering files for test examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-agent-as-daemonset + '[' examples-openshift-agent-as-daemonset '!=' _build ']' + cd .. 
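One detail in the agent-as-daemonset prep above deserves a note: the yq edit injects JAEGER_AGENT_HOST into the Vert.x deployment through the downward API, so the app reports spans to whatever node it lands on, where the hostPort-exposed agent DaemonSet is listening. The rendered container spec should come out with a fragment like this (sketch; only the injected env entry is guaranteed by the trace):

cat <<'EOF'
env:
  - name: JAEGER_AGENT_HOST
    valueFrom:
      fieldRef:
        apiVersion: v1
        fieldPath: status.hostIP
EOF

status.hostIP resolves per pod to the hosting node's IP, which is the standard way to address a node-local agent without going through a Service.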
+ mkdir -p examples-openshift-with-htpasswd + cd examples-openshift-with-htpasswd + export JAEGER_NAME=with-htpasswd + JAEGER_NAME=with-htpasswd + export JAEGER_USERNAME=awesomeuser + JAEGER_USERNAME=awesomeuser + export JAEGER_PASSWORD=awesomepassword + JAEGER_PASSWORD=awesomepassword + export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' + JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ base64 + SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg== + /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + INSECURE=true + JAEGER_USERNAME= + JAEGER_PASSWORD= + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml + JAEGER_USERNAME=wronguser + JAEGER_PASSWORD=wrongpassword + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running examples E2E tests' Running examples E2E tests + cd tests/e2e/examples/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1500832312 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 17 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/examples-agent-as-daemonset === PAUSE kuttl/harness/examples-agent-as-daemonset === RUN kuttl/harness/examples-agent-with-priority-class === PAUSE kuttl/harness/examples-agent-with-priority-class === RUN kuttl/harness/examples-all-in-one-with-options === PAUSE kuttl/harness/examples-all-in-one-with-options === RUN kuttl/harness/examples-auto-provision-kafka === PAUSE kuttl/harness/examples-auto-provision-kafka === RUN kuttl/harness/examples-business-application-injected-sidecar === PAUSE kuttl/harness/examples-business-application-injected-sidecar === RUN kuttl/harness/examples-collector-with-priority-class === PAUSE kuttl/harness/examples-collector-with-priority-class === RUN kuttl/harness/examples-openshift-agent-as-daemonset === PAUSE kuttl/harness/examples-openshift-agent-as-daemonset === RUN kuttl/harness/examples-openshift-with-htpasswd === PAUSE kuttl/harness/examples-openshift-with-htpasswd === RUN kuttl/harness/examples-service-types === PAUSE kuttl/harness/examples-service-types === RUN kuttl/harness/examples-simple-prod === PAUSE kuttl/harness/examples-simple-prod === RUN kuttl/harness/examples-simple-prod-with-volumes === PAUSE kuttl/harness/examples-simple-prod-with-volumes === RUN kuttl/harness/examples-simplest === PAUSE kuttl/harness/examples-simplest === RUN kuttl/harness/examples-with-badger === PAUSE kuttl/harness/examples-with-badger === RUN kuttl/harness/examples-with-badger-and-volume === PAUSE kuttl/harness/examples-with-badger-and-volume === RUN kuttl/harness/examples-with-cassandra === PAUSE kuttl/harness/examples-with-cassandra === RUN kuttl/harness/examples-with-sampling === PAUSE kuttl/harness/examples-with-sampling === CONT kuttl/harness/artifacts logger.go:42: 07:48:41 | artifacts | Creating namespace: kuttl-test-accepted-puma logger.go:42: 07:48:41 | artifacts | artifacts events from ns kuttl-test-accepted-puma: logger.go:42: 07:48:41 | artifacts | Deleting namespace: kuttl-test-accepted-puma === CONT kuttl/harness/examples-service-types logger.go:42: 07:48:47 | examples-service-types | Creating namespace: kuttl-test-assuring-rhino logger.go:42: 07:48:47 | examples-service-types/0-install | starting test step 0-install logger.go:42: 07:48:47 | examples-service-types/0-install | Jaeger:kuttl-test-assuring-rhino/service-types created logger.go:42: 07:48:53 | examples-service-types/0-install | test step completed 0-install logger.go:42: 07:48:53 | examples-service-types/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:48:53 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null] logger.go:42: 07:48:56 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
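Back in the examples-openshift-with-htpasswd render, the credentials were built inline and are easy to reproduce: a classic htpasswd {SHA} entry is base64(sha1(password)) behind a literal "{SHA}" prefix, and the user:hash line is base64-encoded once more to become the Secret payload. A sketch of deriving the same values by hand (the openssl pipeline is my reconstruction, not a command from the test scripts):

JAEGER_USERNAME=awesomeuser
JAEGER_PASSWORD=awesomepassword

# htpasswd SHA format: the literal prefix "{SHA}" plus base64(sha1(password)).
HASH="{SHA}$(printf '%s' "$JAEGER_PASSWORD" | openssl dgst -binary -sha1 | openssl base64)"
echo "${JAEGER_USERNAME}:${HASH}"

# Secret payload: the whole line base64-encoded. The trace used plain echo,
# so a trailing newline is baked in, which is why SECRET ends in Cg==.
SECRET=$(echo "${JAEGER_USERNAME}:${HASH}" | base64)
echo "$SECRET"

The three assert-http-code renders that follow probe the route as anonymous (expect 403), with wrong credentials (403), and with the real pair (200), which is essentially the whole authorization contract that test is checking.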
logger.go:42: 07:49:02 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:49:02 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:49:03 | examples-service-types/1-smoke-test | job.batch/report-span created logger.go:42: 07:49:03 | examples-service-types/1-smoke-test | job.batch/check-span created logger.go:42: 07:49:15 | examples-service-types/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:49:15 | examples-service-types/2- | starting test step 2- logger.go:42: 07:49:15 | examples-service-types/2- | test step completed 2- logger.go:42: 07:49:15 | examples-service-types | examples-service-types events from ns kuttl-test-assuring-rhino: logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:51 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2 Binding Scheduled Successfully assigned kuttl-test-assuring-rhino/service-types-6f5b5d68cf-ngkd2 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:51 +0000 UTC Warning Pod service-types-6f5b5d68cf-ngkd2 FailedMount MountVolume.SetUp failed for volume "service-types-collector-tls-config-volume" : secret "service-types-collector-headless-tls" not found kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:51 +0000 UTC Normal ReplicaSet.apps service-types-6f5b5d68cf SuccessfulCreate Created pod: service-types-6f5b5d68cf-ngkd2 replicaset-controller logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:51 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:51 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-6f5b5d68cf to 1 deployment-controller logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:52 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2 AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:52 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:52 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:52 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:52 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:49:15 | 
examples-service-types | 2023-10-09 07:48:52 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:52 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:54 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:54 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:57 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:58 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:58 +0000 UTC Normal Pod service-types-6f5b5d68cf-ngkd2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:58 +0000 UTC Normal ReplicaSet.apps service-types-6f5b5d68cf SuccessfulDelete Deleted pod: service-types-6f5b5d68cf-ngkd2 replicaset-controller logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:58 +0000 UTC Normal Pod service-types-c9867fdd7-v8tmg Binding Scheduled Successfully assigned kuttl-test-assuring-rhino/service-types-c9867fdd7-v8tmg to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:58 +0000 UTC Normal ReplicaSet.apps service-types-c9867fdd7 SuccessfulCreate Created pod: service-types-c9867fdd7-v8tmg replicaset-controller logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:58 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-6f5b5d68cf to 0 from 1 deployment-controller logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:58 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-c9867fdd7 to 1 deployment-controller logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:59 +0000 UTC Normal Pod service-types-c9867fdd7-v8tmg AddedInterface Add eth0 [10.128.2.39/23] from ovn-kubernetes logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:59 +0000 UTC Normal Pod service-types-c9867fdd7-v8tmg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:59 +0000 UTC Normal Pod service-types-c9867fdd7-v8tmg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:59 +0000 UTC Normal Pod service-types-c9867fdd7-v8tmg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:59 +0000 UTC Normal Pod service-types-c9867fdd7-v8tmg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet 
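As the events above show, the smoke test itself is two batch Jobs: report-span pushes spans at the collector endpoint and check-span polls the query API until they appear, and the kuttl step completes once check-span does. To watch the same thing by hand outside kuttl, something like the following would work (convenience commands of mine, not part of the suite; the namespace value is whatever kuttl generated for the test):

NAMESPACE=kuttl-test-assuring-rhino   # substitute the generated test namespace

kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
kubectl wait --for=condition=complete job/report-span job/check-span \
    -n "$NAMESPACE" --timeout=300s
kubectl logs job/check-span -n "$NAMESPACE"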
logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:59 +0000 UTC Normal Pod service-types-c9867fdd7-v8tmg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:48:59 +0000 UTC Normal Pod service-types-c9867fdd7-v8tmg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod check-span-d9r56 Binding Scheduled Successfully assigned kuttl-test-assuring-rhino/check-span-d9r56 to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod check-span-d9r56 AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod check-span-d9r56.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod check-span-d9r56.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod check-span-d9r56.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-d9r56 job-controller logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod report-span-76rn9 Binding Scheduled Successfully assigned kuttl-test-assuring-rhino/report-span-76rn9 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod report-span-76rn9 AddedInterface Add eth0 [10.129.2.58/23] from ovn-kubernetes logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod report-span-76rn9.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod report-span-76rn9.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Pod report-span-76rn9.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:03 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-76rn9 job-controller logger.go:42: 07:49:15 | examples-service-types | 2023-10-09 07:49:14 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:49:15 | examples-service-types | Deleting namespace: kuttl-test-assuring-rhino === CONT kuttl/harness/examples-with-sampling logger.go:42: 07:49:47 | examples-with-sampling | Creating namespace: kuttl-test-rich-macaw logger.go:42: 07:49:47 | examples-with-sampling/0-install | starting test step 0-install logger.go:42: 07:49:47 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && 
make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 07:49:47 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:49:47 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-rich-macaw logger.go:42: 07:49:47 | examples-with-sampling/0-install | kubectl create namespace kuttl-test-rich-macaw 2>&1 | grep -v "already exists" || true logger.go:42: 07:49:47 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-rich-macaw 2>&1 | grep -v "already exists" || true logger.go:42: 07:49:48 | examples-with-sampling/0-install | service/cassandra created logger.go:42: 07:49:48 | examples-with-sampling/0-install | statefulset.apps/cassandra created logger.go:42: 07:49:48 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:49:59 | examples-with-sampling/0-install | test step completed 0-install logger.go:42: 07:49:59 | examples-with-sampling/1-install | starting test step 1-install logger.go:42: 07:49:59 | examples-with-sampling/1-install | Jaeger:kuttl-test-rich-macaw/with-sampling created logger.go:42: 07:50:04 | examples-with-sampling/1-install | test step completed 1-install logger.go:42: 07:50:04 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:50:04 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null] logger.go:42: 07:50:06 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
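The make cassandra step logged above boils down to two idempotent kubectl creates; the 2>&1 | grep -v "already exists" || true plumbing drops the AlreadyExists chatter and swallows grep's non-zero exit when every line is filtered, so the target can rerun against a namespace kuttl has already created. Pulled out of the log as a standalone sketch:

STORAGE_NAMESPACE=kuttl-test-rich-macaw   # kuttl's generated namespace for this test

# Tolerate reruns: "already exists" lines are dropped and the pipeline's
# exit status is forced to success so make does not abort.
kubectl create namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true
kubectl create -f ./tests/cassandra.yml --namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true

The kuttl assert for the step then waits on the cassandra StatefulSet (two pods in this run) before the Jaeger install step is allowed to proceed.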
logger.go:42: 07:50:13 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:50:13 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:50:14 | examples-with-sampling/2-smoke-test | job.batch/report-span created logger.go:42: 07:50:14 | examples-with-sampling/2-smoke-test | job.batch/check-span created logger.go:42: 07:50:25 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:50:25 | examples-with-sampling/3- | starting test step 3- logger.go:42: 07:50:25 | examples-with-sampling/3- | test step completed 3- logger.go:42: 07:50:25 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-rich-macaw: logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:48 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-rich-macaw/cassandra-0 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:48 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:49 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:49 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:53 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 3.873723084s (3.873736714s including waiting) kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:53 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:53 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:53 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-rich-macaw/cassandra-1 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:53 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:54 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:54 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:57 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 3.831791947s (3.831807977s including waiting) 
kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:58 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:49:58 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52 Binding Scheduled Successfully assigned kuttl-test-rich-macaw/with-sampling-5747765cbf-vsp52 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52 AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal ReplicaSet.apps with-sampling-5747765cbf SuccessfulCreate Created pod: with-sampling-5747765cbf-vsp52 replicaset-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:03 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-5747765cbf to 1 deployment-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:10 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:10 +0000 UTC Normal Pod with-sampling-5747765cbf-vsp52.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:10 +0000 UTC Normal ReplicaSet.apps with-sampling-5747765cbf SuccessfulDelete Deleted pod: with-sampling-5747765cbf-vsp52 replicaset-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:10 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set with-sampling-5747765cbf to 0 from 1 deployment-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal Pod 
with-sampling-576b4d998-9rsg5 Binding Scheduled Successfully assigned kuttl-test-rich-macaw/with-sampling-576b4d998-9rsg5 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal Pod with-sampling-576b4d998-9rsg5 AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal Pod with-sampling-576b4d998-9rsg5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal Pod with-sampling-576b4d998-9rsg5.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal Pod with-sampling-576b4d998-9rsg5.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal Pod with-sampling-576b4d998-9rsg5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal Pod with-sampling-576b4d998-9rsg5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal Pod with-sampling-576b4d998-9rsg5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal ReplicaSet.apps with-sampling-576b4d998 SuccessfulCreate Created pod: with-sampling-576b4d998-9rsg5 replicaset-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:11 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-576b4d998 to 1 deployment-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Pod check-span-wqscm Binding Scheduled Successfully assigned kuttl-test-rich-macaw/check-span-wqscm to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Pod check-span-wqscm AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Pod check-span-wqscm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Pod check-span-wqscm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Pod check-span-wqscm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-wqscm job-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 
07:50:14 +0000 UTC Normal Pod report-span-bh82v Binding Scheduled Successfully assigned kuttl-test-rich-macaw/report-span-bh82v to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Pod report-span-bh82v AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Pod report-span-bh82v.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Pod report-span-bh82v.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Pod report-span-bh82v.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:14 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-bh82v job-controller logger.go:42: 07:50:25 | examples-with-sampling | 2023-10-09 07:50:24 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:50:25 | examples-with-sampling | Deleting namespace: kuttl-test-rich-macaw === CONT kuttl/harness/examples-with-cassandra logger.go:42: 07:50:44 | examples-with-cassandra | Creating namespace: kuttl-test-sunny-ladybug logger.go:42: 07:50:44 | examples-with-cassandra/0-install | starting test step 0-install logger.go:42: 07:50:44 | examples-with-cassandra/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 07:50:44 | examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:50:44 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-sunny-ladybug logger.go:42: 07:50:44 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-sunny-ladybug 2>&1 | grep -v "already exists" || true logger.go:42: 07:50:44 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-sunny-ladybug 2>&1 | grep -v "already exists" || true logger.go:42: 07:50:45 | examples-with-cassandra/0-install | service/cassandra created logger.go:42: 07:50:45 | examples-with-cassandra/0-install | statefulset.apps/cassandra created logger.go:42: 07:50:45 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:50:48 | examples-with-cassandra/0-install | test step completed 0-install logger.go:42: 07:50:48 | examples-with-cassandra/1-install | starting test step 1-install logger.go:42: 07:50:48 | examples-with-cassandra/1-install | Jaeger:kuttl-test-sunny-ladybug/with-cassandra created logger.go:42: 07:51:49 | examples-with-cassandra/1-install | test step completed 1-install logger.go:42: 07:51:49 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:51:49 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null] logger.go:42: 07:51:51 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by 
kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:51:57 | examples-with-cassandra/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:51:57 | examples-with-cassandra/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:51:58 | examples-with-cassandra/2-smoke-test | job.batch/report-span created logger.go:42: 07:51:58 | examples-with-cassandra/2-smoke-test | job.batch/check-span created logger.go:42: 07:52:10 | examples-with-cassandra/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:52:10 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-sunny-ladybug: logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:45 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-sunny-ladybug/cassandra-0 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:45 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.61/23] from ovn-kubernetes logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:45 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:45 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:45 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:45 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-sunny-ladybug/cassandra-1 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:45 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:45 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:46 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:46 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:46 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:46 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra 
kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:51 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-q6275 Binding Scheduled Successfully assigned kuttl-test-sunny-ladybug/with-cassandra-cassandra-schema-job-q6275 to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:51 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-q6275 AddedInterface Add eth0 [10.131.0.36/23] from ovn-kubernetes logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:51 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-q6275.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.47.0" kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:51 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-q6275 job-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:56 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-q6275.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.47.0" in 4.467987952s (4.468002803s including waiting) kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:56 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-q6275.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:50:56 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-q6275.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:01 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:02 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5 Binding Scheduled Successfully assigned kuttl-test-sunny-ladybug/with-cassandra-68898fff7d-dh2b5 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:02 +0000 UTC Normal ReplicaSet.apps with-cassandra-68898fff7d SuccessfulCreate Created pod: with-cassandra-68898fff7d-dh2b5 replicaset-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:02 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-68898fff7d to 1 deployment-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:03 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5 AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:03 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:03 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:03 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5.spec.containers{jaeger} Started Started 
container jaeger kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:03 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:03 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:03 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:05 +0000 UTC Warning Pod with-cassandra-68898fff7d-dh2b5.spec.containers{jaeger} BackOff Back-off restarting failed container jaeger in pod with-cassandra-68898fff7d-dh2b5_kuttl-test-sunny-ladybug(538531b1-688d-4a85-970b-8e44d49ecce6) kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:48 +0000 UTC Warning Pod with-cassandra-68898fff7d-dh2b5.spec.containers{jaeger} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:55 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:55 +0000 UTC Normal Pod with-cassandra-68898fff7d-dh2b5.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:55 +0000 UTC Normal ReplicaSet.apps with-cassandra-68898fff7d SuccessfulDelete Deleted pod: with-cassandra-68898fff7d-dh2b5 replicaset-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:55 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-68898fff7d to 0 from 1 deployment-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:56 +0000 UTC Normal Pod with-cassandra-97d78bd79-4nw5q Binding Scheduled Successfully assigned kuttl-test-sunny-ladybug/with-cassandra-97d78bd79-4nw5q to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:56 +0000 UTC Normal Pod with-cassandra-97d78bd79-4nw5q AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:56 +0000 UTC Normal Pod with-cassandra-97d78bd79-4nw5q.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:56 +0000 UTC Normal Pod with-cassandra-97d78bd79-4nw5q.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:56 +0000 UTC Normal Pod with-cassandra-97d78bd79-4nw5q.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:56 +0000 UTC Normal Pod with-cassandra-97d78bd79-4nw5q.spec.containers{oauth-proxy} Pulled Container image 
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:56 +0000 UTC Normal ReplicaSet.apps with-cassandra-97d78bd79 SuccessfulCreate Created pod: with-cassandra-97d78bd79-4nw5q replicaset-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:56 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-97d78bd79 to 1 deployment-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:57 +0000 UTC Normal Pod with-cassandra-97d78bd79-4nw5q.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:57 +0000 UTC Normal Pod with-cassandra-97d78bd79-4nw5q.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod check-span-45kvg Binding Scheduled Successfully assigned kuttl-test-sunny-ladybug/check-span-45kvg to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod check-span-45kvg AddedInterface Add eth0 [10.129.2.62/23] from ovn-kubernetes logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod check-span-45kvg.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod check-span-45kvg.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod check-span-45kvg.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-45kvg job-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod report-span-bhf5f Binding Scheduled Successfully assigned kuttl-test-sunny-ladybug/report-span-bhf5f to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod report-span-bhf5f AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod report-span-bhf5f.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod report-span-bhf5f.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Pod report-span-bhf5f.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:51:58 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-bhf5f 
job-controller logger.go:42: 07:52:10 | examples-with-cassandra | 2023-10-09 07:52:10 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:52:10 | examples-with-cassandra | Deleting namespace: kuttl-test-sunny-ladybug === CONT kuttl/harness/examples-with-badger-and-volume logger.go:42: 07:52:22 | examples-with-badger-and-volume | Creating namespace: kuttl-test-large-perch logger.go:42: 07:52:22 | examples-with-badger-and-volume/0-install | starting test step 0-install logger.go:42: 07:52:22 | examples-with-badger-and-volume/0-install | Jaeger:kuttl-test-large-perch/with-badger-and-volume created logger.go:42: 07:52:28 | examples-with-badger-and-volume/0-install | test step completed 0-install logger.go:42: 07:52:28 | examples-with-badger-and-volume/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:52:28 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger-and-volume /dev/null] logger.go:42: 07:52:30 | examples-with-badger-and-volume/1-smoke-test | Warning: resource jaegers/with-badger-and-volume is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:52:36 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:52:36 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:52:36 | examples-with-badger-and-volume/1-smoke-test | job.batch/report-span created logger.go:42: 07:52:37 | examples-with-badger-and-volume/1-smoke-test | job.batch/check-span created logger.go:42: 07:59:37 | examples-with-badger-and-volume/1-smoke-test | test step failed 1-smoke-test
case.go:364: failed in step 1-smoke-test
case.go:366: --- Job:kuttl-test-large-perch/check-span
+++ Job:kuttl-test-large-perch/check-span
@@ -1,8 +1,142 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
+  annotations:
+    batch.kubernetes.io/job-tracking: ""
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"check-span","namespace":"kuttl-test-large-perch"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./query"],"env":[{"name":"SERVICE_NAME","value":"smoke-test-service"},{"name":"QUERY_HOST","value":"https://with-badger-and-volume-query:443"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: 4b2f14d7-c716-4940-96b3-69597389ac0b
+    batch.kubernetes.io/job-name: check-span
+    controller-uid: 4b2f14d7-c716-4940-96b3-69597389ac0b
+    job-name: check-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:parallelism: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"QUERY_HOST"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2023-10-09T07:52:37Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-10-09T07:57:56Z"
   name: check-span
   namespace: kuttl-test-large-perch
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  parallelism: 1
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: 4b2f14d7-c716-4940-96b3-69597389ac0b
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: 4b2f14d7-c716-4940-96b3-69597389ac0b
+        batch.kubernetes.io/job-name: check-span
+        controller-uid: 4b2f14d7-c716-4940-96b3-69597389ac0b
+        job-name: check-span
+    spec:
+      containers:
+      - command:
+        - ./query
+        env:
+        - name: SERVICE_NAME
+          value: smoke-test-service
+        - name: QUERY_HOST
+          value: https://with-badger-and-volume-query:443
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b
+        imagePullPolicy: IfNotPresent
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
 status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2023-10-09T07:52:37Z"
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-large-perch/check-span: .status.succeeded: key is missing from map
logger.go:42: 07:59:37 | examples-with-badger-and-volume | examples-with-badger-and-volume events from ns kuttl-test-large-perch: logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal Pod with-badger-and-volume-7dcff57565-wnd2k Binding Scheduled Successfully assigned
kuttl-test-large-perch/with-badger-and-volume-7dcff57565-wnd2k to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal Pod with-badger-and-volume-7dcff57565-wnd2k AddedInterface Add eth0 [10.128.2.46/23] from ovn-kubernetes logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal Pod with-badger-and-volume-7dcff57565-wnd2k.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal Pod with-badger-and-volume-7dcff57565-wnd2k.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal Pod with-badger-and-volume-7dcff57565-wnd2k.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal Pod with-badger-and-volume-7dcff57565-wnd2k.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal Pod with-badger-and-volume-7dcff57565-wnd2k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal Pod with-badger-and-volume-7dcff57565-wnd2k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-7dcff57565 SuccessfulCreate Created pod: with-badger-and-volume-7dcff57565-wnd2k replicaset-controller logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:26 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-7dcff57565 to 1 deployment-controller logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod check-span-q6kd7 Binding Scheduled Successfully assigned kuttl-test-large-perch/check-span-q6kd7 to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod check-span-q6kd7 AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod check-span-q6kd7.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod check-span-q6kd7.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod check-span-q6kd7.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 
2023-10-09 07:52:37 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-q6kd7 job-controller logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod report-span-7nxft Binding Scheduled Successfully assigned kuttl-test-large-perch/report-span-7nxft to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod report-span-7nxft AddedInterface Add eth0 [10.129.2.63/23] from ovn-kubernetes logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod report-span-7nxft.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod report-span-7nxft.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Pod report-span-7nxft.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:52:37 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-7nxft job-controller logger.go:42: 07:59:37 | examples-with-badger-and-volume | 2023-10-09 07:57:42 +0000 UTC Warning Pod check-span-q6kd7.spec.containers{asserts-container} BackOff Back-off restarting failed container asserts-container in pod check-span-q6kd7_kuttl-test-large-perch(92d0e7fe-77be-471d-81f1-e5426439eb6d) kubelet logger.go:42: 07:59:37 | examples-with-badger-and-volume | Deleting namespace: kuttl-test-large-perch === CONT kuttl/harness/examples-with-badger logger.go:42: 07:59:49 | examples-with-badger | Creating namespace: kuttl-test-wired-cattle logger.go:42: 07:59:49 | examples-with-badger/0-install | starting test step 0-install logger.go:42: 07:59:49 | examples-with-badger/0-install | Jaeger:kuttl-test-wired-cattle/with-badger created logger.go:42: 07:59:56 | examples-with-badger/0-install | test step completed 0-install logger.go:42: 07:59:56 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:59:56 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null] logger.go:42: 07:59:57 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
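For context on the examples-with-badger-and-volume failure recorded above: kuttl passes a step once the live object becomes a superset of the step's assert file, and that smoke test timed out because .status.succeeded never appeared on the check-span Job (its asserts-container kept failing and backing off). A minimal sketch of the kind of assert that drives the check, reconstructed from the expected side of the diff; the file name is an assumption and the suite's real assert may carry more fields:

# Hypothetical assert file, e.g. 01-assert.yaml in the test directory.
# kuttl blocks until a Job named check-span in the test namespace
# reports status.succeeded: 1, then marks the step passed.
apiVersion: batch/v1
kind: Job
metadata:
  name: check-span
status:
  succeeded: 1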
logger.go:42: 08:00:03 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:00:04 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:00:04 | examples-with-badger/1-smoke-test | job.batch/report-span created logger.go:42: 08:00:04 | examples-with-badger/1-smoke-test | job.batch/check-span created logger.go:42: 08:00:16 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test logger.go:42: 08:00:16 | examples-with-badger | examples-with-badger events from ns kuttl-test-wired-cattle: logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:52 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64 Binding Scheduled Successfully assigned kuttl-test-wired-cattle/with-badger-6858654f9d-qlt64 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:52 +0000 UTC Normal ReplicaSet.apps with-badger-6858654f9d SuccessfulCreate Created pod: with-badger-6858654f9d-qlt64 replicaset-controller logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:52 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-6858654f9d to 1 deployment-controller logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:53 +0000 UTC Warning Pod with-badger-6858654f9d-qlt64 FailedMount MountVolume.SetUp failed for volume "with-badger-ui-oauth-proxy-tls" : secret "with-badger-ui-oauth-proxy-tls" not found kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:53 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64 AddedInterface Add eth0 [10.128.2.47/23] from ovn-kubernetes logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:53 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:54 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:54 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:54 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:54 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 07:59:54 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:00:16 | 
examples-with-badger | 2023-10-09 08:00:00 +0000 UTC Normal Pod with-badger-54945bb656-ptcql Binding Scheduled Successfully assigned kuttl-test-wired-cattle/with-badger-54945bb656-ptcql to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:00 +0000 UTC Normal ReplicaSet.apps with-badger-54945bb656 SuccessfulCreate Created pod: with-badger-54945bb656-ptcql replicaset-controller logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:00 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:00 +0000 UTC Normal Pod with-badger-6858654f9d-qlt64.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:00 +0000 UTC Normal ReplicaSet.apps with-badger-6858654f9d SuccessfulDelete Deleted pod: with-badger-6858654f9d-qlt64 replicaset-controller logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:00 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-6858654f9d to 0 from 1 deployment-controller logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:00 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-54945bb656 to 1 deployment-controller logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:01 +0000 UTC Normal Pod with-badger-54945bb656-ptcql AddedInterface Add eth0 [10.128.2.49/23] from ovn-kubernetes logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:01 +0000 UTC Normal Pod with-badger-54945bb656-ptcql.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:01 +0000 UTC Normal Pod with-badger-54945bb656-ptcql.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:01 +0000 UTC Normal Pod with-badger-54945bb656-ptcql.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:01 +0000 UTC Normal Pod with-badger-54945bb656-ptcql.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:01 +0000 UTC Normal Pod with-badger-54945bb656-ptcql.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:01 +0000 UTC Normal Pod with-badger-54945bb656-ptcql.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:04 +0000 UTC Normal Pod check-span-zw4vs Binding Scheduled Successfully assigned kuttl-test-wired-cattle/check-span-zw4vs to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:04 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-zw4vs job-controller logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:04 +0000 UTC Normal Pod report-span-ljb6v Binding Scheduled 
Successfully assigned kuttl-test-wired-cattle/report-span-ljb6v to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:04 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-ljb6v job-controller logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:05 +0000 UTC Normal Pod check-span-zw4vs AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:05 +0000 UTC Normal Pod check-span-zw4vs.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:05 +0000 UTC Normal Pod check-span-zw4vs.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:05 +0000 UTC Normal Pod check-span-zw4vs.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:05 +0000 UTC Normal Pod report-span-ljb6v AddedInterface Add eth0 [10.129.2.64/23] from ovn-kubernetes logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:05 +0000 UTC Normal Pod report-span-ljb6v.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:05 +0000 UTC Normal Pod report-span-ljb6v.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:05 +0000 UTC Normal Pod report-span-ljb6v.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:00:16 | examples-with-badger | 2023-10-09 08:00:16 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:00:16 | examples-with-badger | Deleting namespace: kuttl-test-wired-cattle === CONT kuttl/harness/examples-simplest logger.go:42: 08:00:28 | examples-simplest | Creating namespace: kuttl-test-huge-moray logger.go:42: 08:00:28 | examples-simplest/0-install | starting test step 0-install logger.go:42: 08:00:29 | examples-simplest/0-install | Jaeger:kuttl-test-huge-moray/simplest created logger.go:42: 08:00:35 | examples-simplest/0-install | test step completed 0-install logger.go:42: 08:00:35 | examples-simplest/1-smoke-test | starting test step 1-smoke-test logger.go:42: 08:00:35 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null] logger.go:42: 08:00:36 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
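Each smoke test in this suite follows the same pattern: get-token.sh prepares the e2e-test service-account token secret, gomplate renders tests/templates/smoke-test.yaml.template into a report-span/check-span Job pair, and kubectl apply submits both. For examples-simplest, the check-span half plausibly renders as below; this is inferred from the last-applied-configuration JSON captured in the earlier examples-with-badger-and-volume diff, with QUERY_HOST swapped to this test's JAEGER_QUERY_ENDPOINT (see the render command that follows). The report-span Job never appears verbatim in this log, so it is omitted:

# Sketch of the rendered check-span Job for examples-simplest (inferred, not copied from the log).
apiVersion: batch/v1
kind: Job
metadata:
  name: check-span
spec:
  backoffLimit: 15
  template:
    spec:
      restartPolicy: OnFailure
      containers:
      - name: asserts-container
        image: registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b
        command: ["./query"]  # polls the query API until the reported span shows up
        env:
        - name: SERVICE_NAME
          value: smoke-test-service
        - name: QUERY_HOST
          value: https://simplest-query:443
        - name: SECRET_PATH
          value: /var/run/secrets/api-token/token
        volumeMounts:
        - mountPath: /var/run/secrets/api-token
          name: token-api-volume
      volumes:
      - name: token-api-volume
        secret:
          secretName: e2e-test  # mounted so ./query can authenticate through the oauth-proxy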
logger.go:42: 08:00:42 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:00:43 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:00:43 | examples-simplest/1-smoke-test | job.batch/report-span created logger.go:42: 08:00:43 | examples-simplest/1-smoke-test | job.batch/check-span created logger.go:42: 08:00:55 | examples-simplest/1-smoke-test | test step completed 1-smoke-test logger.go:42: 08:00:55 | examples-simplest | examples-simplest events from ns kuttl-test-huge-moray: logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:32 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z Binding Scheduled Successfully assigned kuttl-test-huge-moray/simplest-6c7f798649-qrh9z to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:32 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:32 +0000 UTC Normal ReplicaSet.apps simplest-6c7f798649 SuccessfulCreate Created pod: simplest-6c7f798649-qrh9z replicaset-controller logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:32 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-6c7f798649 to 1 deployment-controller logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:33 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:33 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:33 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:33 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:33 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:33 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:37 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:37 +0000 UTC Normal Pod simplest-6c7f798649-qrh9z.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:00:55 | 
examples-simplest | 2023-10-09 08:00:37 +0000 UTC Normal ReplicaSet.apps simplest-6c7f798649 SuccessfulDelete Deleted pod: simplest-6c7f798649-qrh9z replicaset-controller logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:37 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-6c7f798649 to 0 from 1 deployment-controller logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:38 +0000 UTC Normal Pod simplest-55fd58f674-vm87t Binding Scheduled Successfully assigned kuttl-test-huge-moray/simplest-55fd58f674-vm87t to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:38 +0000 UTC Normal ReplicaSet.apps simplest-55fd58f674 SuccessfulCreate Created pod: simplest-55fd58f674-vm87t replicaset-controller logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:38 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-55fd58f674 to 1 deployment-controller logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:39 +0000 UTC Normal Pod simplest-55fd58f674-vm87t AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:39 +0000 UTC Normal Pod simplest-55fd58f674-vm87t.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:39 +0000 UTC Normal Pod simplest-55fd58f674-vm87t.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:39 +0000 UTC Normal Pod simplest-55fd58f674-vm87t.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:39 +0000 UTC Normal Pod simplest-55fd58f674-vm87t.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:39 +0000 UTC Normal Pod simplest-55fd58f674-vm87t.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:39 +0000 UTC Normal Pod simplest-55fd58f674-vm87t.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:43 +0000 UTC Normal Pod check-span-gwh2p Binding Scheduled Successfully assigned kuttl-test-huge-moray/check-span-gwh2p to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:43 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-gwh2p job-controller logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:43 +0000 UTC Normal Pod report-span-cp8gq Binding Scheduled Successfully assigned kuttl-test-huge-moray/report-span-cp8gq to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:43 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-cp8gq job-controller logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:44 +0000 UTC Normal Pod check-span-gwh2p AddedInterface Add eth0 [10.131.0.40/23] from ovn-kubernetes logger.go:42: 08:00:55 | examples-simplest | 
2023-10-09 08:00:44 +0000 UTC Normal Pod check-span-gwh2p.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:44 +0000 UTC Normal Pod check-span-gwh2p.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:44 +0000 UTC Normal Pod check-span-gwh2p.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:44 +0000 UTC Normal Pod report-span-cp8gq AddedInterface Add eth0 [10.129.2.65/23] from ovn-kubernetes logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:44 +0000 UTC Normal Pod report-span-cp8gq.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:44 +0000 UTC Normal Pod report-span-cp8gq.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:44 +0000 UTC Normal Pod report-span-cp8gq.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:00:55 | examples-simplest | 2023-10-09 08:00:55 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:00:55 | examples-simplest | Deleting namespace: kuttl-test-huge-moray === CONT kuttl/harness/examples-simple-prod-with-volumes logger.go:42: 08:01:08 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:01:08 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-famous-marlin logger.go:42: 08:01:08 | examples-simple-prod-with-volumes/1-install | starting test step 1-install logger.go:42: 08:01:08 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-famous-marlin/simple-prod created logger.go:42: 08:01:43 | examples-simple-prod-with-volumes/1-install | test step completed 1-install logger.go:42: 08:01:43 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:01:43 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 08:01:44 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
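The 03-check-volume.yaml.template file skipped by the regexp note below fails kuttl's step-name pattern by design: render.sh runs it through gomplate ahead of time, and the resulting numbered file is what executes as step 3-check-volume. A sketch of that rendered step, assuming kuttl's standard TestStep wrapper; the wrapper and file name are assumptions, while the script line is the exact command the step output shows:

# Hypothetical rendered 03-check-volume.yaml
apiVersion: kuttl.dev/v1beta1
kind: TestStep
commands:
  # Look up the collector pod by label, then list the Elasticsearch data
  # directory inside it to prove the volume actually mounted.
- script: kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data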
logger.go:42: 08:01:51 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:01:51 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:01:52 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created logger.go:42: 08:01:52 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created logger.go:42: 08:02:04 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test logger.go:42: 08:02:04 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume logger.go:42: 08:02:04 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data] logger.go:42: 08:02:04 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-famous-marlin: logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:13 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbc599 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g replicaset-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g Binding Scheduled Successfully assigned kuttl-test-famous-marlin/elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:13 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbc599 to 1 deployment-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:14 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:24 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:29 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfamousmarlinsimpleprod-1-6455bbqgq9g.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:40 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-252tc Binding Scheduled Successfully assigned kuttl-test-famous-marlin/simple-prod-collector-6978c9cd74-252tc to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:40 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-6978c9cd74 SuccessfulCreate Created pod: simple-prod-collector-6978c9cd74-252tc replicaset-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:40 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-6978c9cd74 to 1 deployment-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:40 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn Binding Scheduled Successfully assigned kuttl-test-famous-marlin/simple-prod-query-59cb6b6b7b-xnjnn to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:40 +0000 UTC Normal ReplicaSet.apps simple-prod-query-59cb6b6b7b SuccessfulCreate Created pod: simple-prod-query-59cb6b6b7b-xnjnn replicaset-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:40 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-59cb6b6b7b to 1 deployment-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-252tc AddedInterface Add eth0 [10.129.2.66/23] from ovn-kubernetes logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-252tc.spec.containers{jaeger-collector} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-252tc.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-collector-6978c9cd74-252tc.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:41 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:47 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:47 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet 
logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:47 +0000 UTC Normal Pod simple-prod-query-59cb6b6b7b-xnjnn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:47 +0000 UTC Normal ReplicaSet.apps simple-prod-query-59cb6b6b7b SuccessfulDelete Deleted pod: simple-prod-query-59cb6b6b7b-xnjnn replicaset-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:47 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-59cb6b6b7b to 0 from 1 deployment-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:48 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t Binding Scheduled Successfully assigned kuttl-test-famous-marlin/simple-prod-query-6c65b5b9b5-zkx8t to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:48 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6c65b5b9b5 SuccessfulCreate Created pod: simple-prod-query-6c65b5b9b5-zkx8t replicaset-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:48 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6c65b5b9b5 to 1 deployment-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:02:04 | 
examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:49 +0000 UTC Normal Pod simple-prod-query-6c65b5b9b5-zkx8t.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod check-span-r84vm Binding Scheduled Successfully assigned kuttl-test-famous-marlin/check-span-r84vm to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod check-span-r84vm AddedInterface Add eth0 [10.129.2.68/23] from ovn-kubernetes logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod check-span-r84vm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod check-span-r84vm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod check-span-r84vm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-r84vm job-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod report-span-n44bb Binding Scheduled Successfully assigned kuttl-test-famous-marlin/report-span-n44bb to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod report-span-n44bb AddedInterface Add eth0 [10.129.2.67/23] from ovn-kubernetes logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod report-span-n44bb.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod report-span-n44bb.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Pod report-span-n44bb.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:52 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-n44bb job-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:55 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:55 +0000 UTC Warning 
HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:01:55 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | 2023-10-09 08:02:03 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:02:04 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-famous-marlin === CONT kuttl/harness/examples-simple-prod logger.go:42: 08:02:16 | examples-simple-prod | Creating namespace: kuttl-test-united-marlin logger.go:42: 08:02:16 | examples-simple-prod/1-install | starting test step 1-install logger.go:42: 08:02:16 | examples-simple-prod/1-install | Jaeger:kuttl-test-united-marlin/simple-prod created logger.go:42: 08:02:53 | examples-simple-prod/1-install | test step completed 1-install logger.go:42: 08:02:53 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:02:53 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 08:02:56 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
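The kubectl warning above is benign: kuttl creates the Jaeger resource imperatively, so it carries no kubectl.kubernetes.io/last-applied-configuration annotation, and the kubectl apply issued during this step patches it in automatically. A minimal sketch of the two ways to avoid the warning in the first place (jaeger.yaml is a placeholder file name):

    # record the applied configuration at creation time...
    kubectl create -f jaeger.yaml --save-config
    # ...or create the resource declaratively from the start
    kubectl apply -f jaeger.yaml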
logger.go:42: 08:03:03 | examples-simple-prod/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:03:03 | examples-simple-prod/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:03:04 | examples-simple-prod/2-smoke-test | job.batch/report-span created
logger.go:42: 08:03:04 | examples-simple-prod/2-smoke-test | job.batch/check-span created
logger.go:42: 08:03:16 | examples-simple-prod/2-smoke-test | test step completed 2-smoke-test
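Every smoke test in this suite follows the same render-and-apply pattern: gomplate expands tests/templates/smoke-test.yaml.template with the collector and query endpoints into a pair of Jobs; judging by the job names, report-span submits a test span to the collector while check-span polls the query endpoint until the span is visible. A minimal sketch of that loop under those assumptions (endpoint values taken from this run):

    export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
    export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
    gomplate -f tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
    # the step passes once the asserting Job finishes
    kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=300s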
logger.go:42: 08:03:16 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-united-marlin:
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf Binding Scheduled Successfully assigned kuttl-test-united-marlin/elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:22 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d867 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf replicaset-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:22 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d867 to 1 deployment-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:33 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:38 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestunitedmarlinsimpleprod-1-7f658d4rkbf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:49 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-z8nc4 Binding Scheduled Successfully assigned kuttl-test-united-marlin/simple-prod-collector-5499b86c46-z8nc4 to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:49 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5499b86c46 SuccessfulCreate Created pod: simple-prod-collector-5499b86c46-z8nc4 replicaset-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:49 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5499b86c46 to 1 deployment-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:49 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh Binding Scheduled Successfully assigned kuttl-test-united-marlin/simple-prod-query-777b59684f-dcwvh to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:49 +0000 UTC Normal ReplicaSet.apps simple-prod-query-777b59684f SuccessfulCreate Created pod: simple-prod-query-777b59684f-dcwvh replicaset-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:49 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-777b59684f to 1 deployment-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-z8nc4 AddedInterface Add eth0 [10.129.2.69/23] from ovn-kubernetes
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-z8nc4.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-z8nc4.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-collector-5499b86c46-z8nc4.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:50 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:59 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:59 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:59 +0000 UTC Normal Pod simple-prod-query-777b59684f-dcwvh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:59 +0000 UTC Normal ReplicaSet.apps simple-prod-query-777b59684f SuccessfulDelete Deleted pod: simple-prod-query-777b59684f-dcwvh replicaset-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:02:59 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-777b59684f to 0 from 1 deployment-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:00 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7 Binding Scheduled Successfully assigned kuttl-test-united-marlin/simple-prod-query-9c47dd79d-rx2f7 to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:00 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7 AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:00 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:00 +0000 UTC Normal ReplicaSet.apps simple-prod-query-9c47dd79d SuccessfulCreate Created pod: simple-prod-query-9c47dd79d-rx2f7 replicaset-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:00 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-9c47dd79d to 1 deployment-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:01 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:01 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:01 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:01 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:01 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:01 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:01 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:01 +0000 UTC Normal Pod simple-prod-query-9c47dd79d-rx2f7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Pod check-span-66bh2 Binding Scheduled Successfully assigned kuttl-test-united-marlin/check-span-66bh2 to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Pod check-span-66bh2 AddedInterface Add eth0 [10.129.2.71/23] from ovn-kubernetes
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Pod check-span-66bh2.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Pod check-span-66bh2.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Pod check-span-66bh2.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-66bh2 job-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Pod report-span-pqxlr Binding Scheduled Successfully assigned kuttl-test-united-marlin/report-span-pqxlr to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Pod report-span-pqxlr AddedInterface Add eth0 [10.129.2.70/23] from ovn-kubernetes
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Pod report-span-pqxlr.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:04 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-pqxlr job-controller
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:05 +0000 UTC Normal Pod report-span-pqxlr.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:05 +0000 UTC Normal Pod report-span-pqxlr.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:03:16 | examples-simple-prod | 2023-10-09 08:03:15 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:03:16 | examples-simple-prod | Deleting namespace: kuttl-test-united-marlin
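Two warning families recur in these event dumps and neither fails a test. The Elasticsearch readiness probe reporting [response code: 000] means the probe's curl got no HTTP response at all while the node was still booting, and the HorizontalPodAutoscaler FailedGetResourceMetric/FailedComputeMetricsReplicas events appear because the collector's HPA queries the resource metrics API before the freshly started pod has reported any samples. Both clear on their own; a quick way to confirm the HPA state by hand (namespace name from this run):

    kubectl describe hpa simple-prod-collector -n kuttl-test-united-marlin
    kubectl top pod -n kuttl-test-united-marlin   # empty until the metrics API has samples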
=== CONT kuttl/harness/examples-business-application-injected-sidecar
logger.go:42: 08:03:23 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-engaged-parrot
logger.go:42: 08:03:23 | examples-business-application-injected-sidecar/0-install | starting test step 0-install
logger.go:42: 08:03:23 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-engaged-parrot/myapp created
logger.go:42: 08:03:23 | examples-business-application-injected-sidecar/0-install | test step completed 0-install
logger.go:42: 08:03:23 | examples-business-application-injected-sidecar/1-install | starting test step 1-install
logger.go:42: 08:03:23 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-engaged-parrot/simplest created
logger.go:42: 08:03:35 | examples-business-application-injected-sidecar/1-install | test step completed 1-install
logger.go:42: 08:03:35 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:03:35 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 08:03:38 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:03:45 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:03:46 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:03:47 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created
logger.go:42: 08:03:47 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-engaged-parrot:
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:23 +0000 UTC Normal Pod myapp-758c74659c-6d6hw Binding Scheduled Successfully assigned kuttl-test-engaged-parrot/myapp-758c74659c-6d6hw to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:23 +0000 UTC Normal ReplicaSet.apps myapp-758c74659c SuccessfulCreate Created pod: myapp-758c74659c-6d6hw replicaset-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:23 +0000 UTC Normal Pod myapp-7c764668bd-rhclt Binding Scheduled Successfully assigned kuttl-test-engaged-parrot/myapp-7c764668bd-rhclt to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:23 +0000 UTC Normal ReplicaSet.apps myapp-7c764668bd SuccessfulCreate Created pod: myapp-7c764668bd-rhclt replicaset-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:23 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-7c764668bd to 1 deployment-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:23 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-758c74659c to 1 deployment-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:24 +0000 UTC Warning Pod myapp-758c74659c-6d6hw FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:24 +0000 UTC Warning Pod myapp-758c74659c-6d6hw FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:24 +0000 UTC Normal Pod myapp-7c764668bd-rhclt AddedInterface Add eth0 [10.129.2.72/23] from ovn-kubernetes
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:24 +0000 UTC Normal Pod myapp-7c764668bd-rhclt.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:27 +0000 UTC Normal Pod myapp-7c764668bd-rhclt.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.489738841s (3.489752351s including waiting) kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:27 +0000 UTC Normal Pod myapp-7c764668bd-rhclt.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:27 +0000 UTC Normal Pod myapp-7c764668bd-rhclt.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:32 +0000 UTC Normal Pod simplest-6757786d54-bck6s Binding Scheduled Successfully assigned kuttl-test-engaged-parrot/simplest-6757786d54-bck6s to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:32 +0000 UTC Normal Pod simplest-6757786d54-bck6s AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:32 +0000 UTC Normal Pod simplest-6757786d54-bck6s.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:32 +0000 UTC Normal ReplicaSet.apps simplest-6757786d54 SuccessfulCreate Created pod: simplest-6757786d54-bck6s replicaset-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:32 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-6757786d54 to 1 deployment-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:34 +0000 UTC Normal Pod simplest-6757786d54-bck6s.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" in 2.077547398s (2.077561319s including waiting) kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:34 +0000 UTC Normal Pod simplest-6757786d54-bck6s.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:34 +0000 UTC Normal Pod simplest-6757786d54-bck6s.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:34 +0000 UTC Normal Pod simplest-6757786d54-bck6s.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:34 +0000 UTC Normal Pod simplest-6757786d54-bck6s.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:34 +0000 UTC Normal Pod simplest-6757786d54-bck6s.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:35 +0000 UTC Warning Pod myapp-7c764668bd-rhclt.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.129.2.72:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:39 +0000 UTC Normal Pod myapp-758c74659c-6d6hw AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:39 +0000 UTC Normal Pod myapp-758c74659c-6d6hw.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:39 +0000 UTC Normal Pod simplest-6757786d54-bck6s.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:39 +0000 UTC Normal Pod simplest-6757786d54-bck6s.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:39 +0000 UTC Normal ReplicaSet.apps simplest-6757786d54 SuccessfulDelete Deleted pod: simplest-6757786d54-bck6s replicaset-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:39 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-6757786d54 to 0 from 1 deployment-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:40 +0000 UTC Normal Pod simplest-b8945fd8f-c22g4 Binding Scheduled Successfully assigned kuttl-test-engaged-parrot/simplest-b8945fd8f-c22g4 to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:40 +0000 UTC Normal ReplicaSet.apps simplest-b8945fd8f SuccessfulCreate Created pod: simplest-b8945fd8f-c22g4 replicaset-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:40 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-b8945fd8f to 1 deployment-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:41 +0000 UTC Normal Pod simplest-b8945fd8f-c22g4 AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:41 +0000 UTC Normal Pod simplest-b8945fd8f-c22g4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:41 +0000 UTC Normal Pod simplest-b8945fd8f-c22g4.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:41 +0000 UTC Normal Pod simplest-b8945fd8f-c22g4.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:41 +0000 UTC Normal Pod simplest-b8945fd8f-c22g4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:41 +0000 UTC Normal Pod simplest-b8945fd8f-c22g4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:41 +0000 UTC Normal Pod simplest-b8945fd8f-c22g4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:43 +0000 UTC Normal Pod myapp-758c74659c-6d6hw.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.526733261s (3.526747731s including waiting) kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:43 +0000 UTC Normal Pod myapp-758c74659c-6d6hw.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:43 +0000 UTC Normal Pod myapp-758c74659c-6d6hw.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:43 +0000 UTC Normal Pod myapp-758c74659c-6d6hw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:43 +0000 UTC Normal Pod myapp-758c74659c-6d6hw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:43 +0000 UTC Normal Pod myapp-758c74659c-6d6hw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:45 +0000 UTC Normal Pod myapp-7c764668bd-rhclt.spec.containers{myapp} Killing Stopping container myapp kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:45 +0000 UTC Normal ReplicaSet.apps myapp-7c764668bd SuccessfulDelete Deleted pod: myapp-7c764668bd-rhclt replicaset-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:45 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-7c764668bd to 0 from 1 deployment-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:47 +0000 UTC Normal Pod check-span-8b8qr Binding Scheduled Successfully assigned kuttl-test-engaged-parrot/check-span-8b8qr to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:47 +0000 UTC Normal Pod check-span-8b8qr AddedInterface Add eth0 [10.129.2.74/23] from ovn-kubernetes
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:47 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-8b8qr job-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:47 +0000 UTC Normal Pod report-span-4wwvq Binding Scheduled Successfully assigned kuttl-test-engaged-parrot/report-span-4wwvq to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:47 +0000 UTC Normal Pod report-span-4wwvq AddedInterface Add eth0 [10.129.2.73/23] from ovn-kubernetes
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:47 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-4wwvq job-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:48 +0000 UTC Normal Pod check-span-8b8qr.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:48 +0000 UTC Normal Pod check-span-8b8qr.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:48 +0000 UTC Normal Pod check-span-8b8qr.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:48 +0000 UTC Normal Pod report-span-4wwvq.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:48 +0000 UTC Normal Pod report-span-4wwvq.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:48 +0000 UTC Normal Pod report-span-4wwvq.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:50 +0000 UTC Warning Pod myapp-758c74659c-6d6hw.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.128.2.54:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | 2023-10-09 08:03:58 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:03:59 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-engaged-parrot
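This test exercises sidecar injection into a pre-existing workload: myapp is deployed first, the simplest all-in-one Jaeger comes up, and the operator then rolls the Deployment so the new ReplicaSet's pod carries a jaeger-agent sidecar. The early FailedMount warnings are that new pod briefly racing the operator for the simplest-trusted-ca and simplest-service-ca config maps, and the liveness failures are the probe firing while the vertx image was still warming up; all of it resolves within the step. A minimal sketch of how injection is requested, assuming the operator's documented annotation (it can also be set directly in the Deployment manifest):

    kubectl annotate deployment myapp "sidecar.jaegertracing.io/inject=true" -n "$NAMESPACE" --overwrite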
=== CONT kuttl/harness/examples-openshift-with-htpasswd
logger.go:42: 08:04:11 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:04:11 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:04:11 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-deciding-clam
logger.go:42: 08:04:11 | examples-openshift-with-htpasswd/0-install | starting test step 0-install
logger.go:42: 08:04:11 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-deciding-clam/htpasswd created
logger.go:42: 08:04:11 | examples-openshift-with-htpasswd/0-install | test step completed 0-install
logger.go:42: 08:04:11 | examples-openshift-with-htpasswd/1-install | starting test step 1-install
logger.go:42: 08:04:11 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-deciding-clam/with-htpasswd created
logger.go:42: 08:04:18 | examples-openshift-with-htpasswd/1-install | test step completed 1-install
logger.go:42: 08:04:18 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured
logger.go:42: 08:04:18 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:04:18 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated
logger.go:42: 08:04:18 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0
logger.go:42: 08:04:19 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 08:04:19 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 08:04:19 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response
logger.go:42: 08:04:19 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift
logger.go:42: 08:04:19 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret
logger.go:42: 08:04:19 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:04:19 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 08:04:19 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:04:19 | examples-openshift-with-htpasswd/2-check-unsecured | HTTP response is 503. 403 expected. Waiting 10 s
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 3/30 the https://with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd]
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-deciding-clam.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-deciding-clam:
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:15 +0000 UTC Normal Pod with-htpasswd-5b8c9bd6-6nwlz Binding Scheduled Successfully assigned kuttl-test-deciding-clam/with-htpasswd-5b8c9bd6-6nwlz to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:15 +0000 UTC Normal ReplicaSet.apps with-htpasswd-5b8c9bd6 SuccessfulCreate Created pod: with-htpasswd-5b8c9bd6-6nwlz replicaset-controller
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:15 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-5b8c9bd6 to 1 deployment-controller
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:16 +0000 UTC Normal Pod with-htpasswd-5b8c9bd6-6nwlz AddedInterface Add eth0 [10.128.2.55/23] from ovn-kubernetes
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:16 +0000 UTC Normal Pod with-htpasswd-5b8c9bd6-6nwlz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:16 +0000 UTC Normal Pod with-htpasswd-5b8c9bd6-6nwlz.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:16 +0000 UTC Normal Pod with-htpasswd-5b8c9bd6-6nwlz.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:16 +0000 UTC Normal Pod with-htpasswd-5b8c9bd6-6nwlz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:16 +0000 UTC Normal Pod with-htpasswd-5b8c9bd6-6nwlz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | 2023-10-09 08:04:16 +0000 UTC Normal Pod with-htpasswd-5b8c9bd6-6nwlz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:04:29 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-deciding-clam
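The three check steps above assert HTTP status codes through the OpenShift route: with no credentials or wrong htpasswd credentials the oauth-proxy must answer 403, while the provisioned user gets 200 (the early 503 is just the router catching up, hence the retry loop). Conceptually, assert-jaeger-http-code.sh reduces to a curl probe like this sketch (route lookup taken from the commands above):

    HOST=$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE")
    # -k: the router certificate is not in the default trust store; -w prints only the status code
    curl -ksu wronguser:wrongpassword -o /dev/null -w '%{http_code}\n' "https://$HOST/search"     # expect 403
    curl -ksu awesomeuser:awesomepassword -o /dev/null -w '%{http_code}\n' "https://$HOST/search" # expect 200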
=== CONT kuttl/harness/examples-openshift-agent-as-daemonset
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset | Creating namespace: kuttl-test-artistic-silkworm
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset/0-install | starting test step 0-install
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-artistic-silkworm/jaeger-agent-daemonset created
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset/0-install | test step completed 0-install
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset/1-add-policy | starting test step 1-add-policy
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset]
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset"
logger.go:42: 08:04:35 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c sleep 5]
logger.go:42: 08:04:40 | examples-openshift-agent-as-daemonset/1-add-policy | test step completed 1-add-policy
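The add-scc-to-user step exists because the agent DaemonSet binds host ports, which the default restricted SCC forbids; the dedicated daemonset-with-hostport SecurityContextConstraints is granted to the jaeger-agent-daemonset service account, and the sleep 5 gives the RBAC change a moment to propagate. A quick way to verify the grant by hand (field name per the SCC API; the clusterrole name comes from the command output above):

    oc get scc daemonset-with-hostport -o jsonpath='{.allowHostPorts}{"\n"}'   # expect: true
    oc get clusterrole system:openshift:scc:daemonset-with-hostport -o yaml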
logger.go:42: 08:04:58 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_NAME=order ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JOB_NUMBER=00 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o find-service-00-job.yaml]
logger.go:42: 08:04:59 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c kubectl create -f find-service-00-job.yaml -n $NAMESPACE]
logger.go:42: 08:04:59 | examples-openshift-agent-as-daemonset/4-find-service | job.batch/00-find-service created
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset/4-find-service | test step completed 4-find-service
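The find-service job above is rendered from a shared template by gomplate, which substitutes exported environment variables into YAML before the step applies it. A stripped-down sketch of the mechanism (this template body is illustrative, not the real find-service.yaml.template):

# Illustrative template in gomplate syntax: {{ .Env.NAME }} expands to the
# environment variable NAME at render time.
cat > find-service.yaml.template <<'EOF'
apiVersion: batch/v1
kind: Job
metadata:
  name: {{ .Env.JOB_NUMBER }}-find-service
spec:
  template:
    spec:
      restartPolicy: OnFailure
      containers:
      - name: asserts-container
        image: {{ .Env.ASSERT_IMG }}
        env:
        - name: SERVICE_NAME
          value: {{ .Env.SERVICE_NAME }}
EOF

# Render it the way the test step does: environment in, concrete YAML out.
SERVICE_NAME=order JOB_NUMBER=00 ASSERT_IMG=example.invalid/assert:latest \
  gomplate -f find-service.yaml.template -o find-service-00-job.yaml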
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | examples-openshift-agent-as-daemonset events from ns kuttl-test-artistic-silkworm:
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh Binding Scheduled Successfully assigned kuttl-test-artistic-silkworm/agent-as-daemonset-8ddf565fd-2hbbh to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh AddedInterface Add eth0 [10.128.2.56/23] from ovn-kubernetes
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-8ddf565fd SuccessfulCreate Created pod: agent-as-daemonset-8ddf565fd-2hbbh replicaset-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-gj498 Binding Scheduled Successfully assigned kuttl-test-artistic-silkworm/agent-as-daemonset-agent-daemonset-gj498 to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-gj498 AddedInterface Add eth0 [10.129.2.75/23] from ovn-kubernetes
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-r5jd5 Binding Scheduled Successfully assigned kuttl-test-artistic-silkworm/agent-as-daemonset-agent-daemonset-r5jd5 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-tgwl7 Binding Scheduled Successfully assigned kuttl-test-artistic-silkworm/agent-as-daemonset-agent-daemonset-tgwl7 to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-gj498 daemonset-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-tgwl7 daemonset-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-r5jd5 daemonset-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:44 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-8ddf565fd to 1 deployment-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-gj498.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-gj498.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-gj498.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-r5jd5 AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-r5jd5.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-r5jd5.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-r5jd5.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-tgwl7 AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-tgwl7.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-tgwl7.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:45 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-tgwl7.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:46 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-52lgh Binding Scheduled Successfully assigned kuttl-test-artistic-silkworm/vertx-create-span-sidecar-54946f4fd-52lgh to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:46 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-52lgh AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:46 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:46 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-54946f4fd SuccessfulCreate Created pod: vertx-create-span-sidecar-54946f4fd-52lgh replicaset-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:46 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-54946f4fd to 1 deployment-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:50 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.510221615s (3.510238346s including waiting) kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:50 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:50 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:53 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:53 +0000 UTC Normal Pod agent-as-daemonset-8ddf565fd-2hbbh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:53 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-8ddf565fd SuccessfulDelete Deleted pod: agent-as-daemonset-8ddf565fd-2hbbh replicaset-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:53 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-8ddf565fd to 0 from 1 deployment-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:54 +0000 UTC Normal Pod agent-as-daemonset-c66d67f75-bslql Binding Scheduled Successfully assigned kuttl-test-artistic-silkworm/agent-as-daemonset-c66d67f75-bslql to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:54 +0000 UTC Normal Pod agent-as-daemonset-c66d67f75-bslql AddedInterface Add eth0 [10.128.2.58/23] from ovn-kubernetes
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:54 +0000 UTC Normal Pod agent-as-daemonset-c66d67f75-bslql.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:54 +0000 UTC Normal Pod agent-as-daemonset-c66d67f75-bslql.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:54 +0000 UTC Normal Pod agent-as-daemonset-c66d67f75-bslql.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:54 +0000 UTC Normal Pod agent-as-daemonset-c66d67f75-bslql.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:54 +0000 UTC Normal Pod agent-as-daemonset-c66d67f75-bslql.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:54 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-c66d67f75 SuccessfulCreate Created pod: agent-as-daemonset-c66d67f75-bslql replicaset-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:54 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-c66d67f75 to 1 deployment-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:55 +0000 UTC Normal Pod agent-as-daemonset-c66d67f75-bslql.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:58 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.48:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:58 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.48:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:59 +0000 UTC Normal Pod 00-find-service-kfkpk Binding Scheduled Successfully assigned kuttl-test-artistic-silkworm/00-find-service-kfkpk to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:04:59 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-kfkpk job-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:00 +0000 UTC Normal Pod 00-find-service-kfkpk AddedInterface Add eth0 [10.129.2.76/23] from ovn-kubernetes
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:00 +0000 UTC Normal Pod 00-find-service-kfkpk.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:00 +0000 UTC Normal Pod 00-find-service-kfkpk.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:00 +0000 UTC Normal Pod 00-find-service-kfkpk.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:00 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:00 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.48:8080/": read tcp 10.131.0.2:45144->10.131.0.48:8080: read: connection reset by peer kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:00 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.48:8080/": dial tcp 10.131.0.48:8080: connect: connection refused kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:01 +0000 UTC Normal Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:11 +0000 UTC Warning Pod vertx-create-span-sidecar-54946f4fd-52lgh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.48:8080/": read tcp 10.131.0.2:49770->10.131.0.48:8080: read: connection reset by peer kubelet
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | 2023-10-09 08:05:18 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller
logger.go:42: 08:05:19 | examples-openshift-agent-as-daemonset | Deleting namespace: kuttl-test-artistic-silkworm
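Worth noting in the events of the test that just finished: the vertx-create-span-sidecar container failed its liveness and readiness probes ("context deadline exceeded" means the HTTP probe got no response within its timeout) and was restarted by the kubelet, yet the find-service job still completed. Warnings like these can be pulled out of a namespace directly; an illustrative invocation:

# List only the probe-failure events, newest last.
kubectl get events -n $NAMESPACE \
  --field-selector type=Warning,reason=Unhealthy \
  --sort-by=.lastTimestamp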
=== CONT kuttl/harness/examples-collector-with-priority-class
logger.go:42: 08:05:26 | examples-collector-with-priority-class | Creating namespace: kuttl-test-logical-cheetah
logger.go:42: 08:05:26 | examples-collector-with-priority-class/0-install | starting test step 0-install
logger.go:42: 08:05:26 | examples-collector-with-priority-class/0-install | PriorityClass:/collector-high-priority created
logger.go:42: 08:05:26 | examples-collector-with-priority-class/0-install | Jaeger:kuttl-test-logical-cheetah/collector-with-high-priority created
logger.go:42: 08:05:32 | examples-collector-with-priority-class/0-install | test step completed 0-install
logger.go:42: 08:05:32 | examples-collector-with-priority-class/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:05:32 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE collector-with-high-priority /dev/null]
logger.go:42: 08:05:33 | examples-collector-with-priority-class/1-smoke-test | Warning: resource jaegers/collector-with-high-priority is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:05:40 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:05:40 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:05:40 | examples-collector-with-priority-class/1-smoke-test | job.batch/report-span created
logger.go:42: 08:05:40 | examples-collector-with-priority-class/1-smoke-test | job.batch/check-span created
logger.go:42: 08:05:52 | examples-collector-with-priority-class/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:05:52 | examples-collector-with-priority-class | examples-collector-with-priority-class events from ns kuttl-test-logical-cheetah:
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:29 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn Binding Scheduled Successfully assigned kuttl-test-logical-cheetah/collector-with-high-priority-56d6f9d68-2tdmn to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:29 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-56d6f9d68 SuccessfulCreate Created pod: collector-with-high-priority-56d6f9d68-2tdmn replicaset-controller
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:29 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-56d6f9d68 to 1 deployment-controller
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:30 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn AddedInterface Add eth0 [10.128.2.59/23] from ovn-kubernetes
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:30 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:30 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:30 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:30 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:30 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:30 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:35 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:35 +0000 UTC Normal Pod collector-with-high-priority-56d6f9d68-2tdmn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:35 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-56d6f9d68 SuccessfulDelete Deleted pod: collector-with-high-priority-56d6f9d68-2tdmn replicaset-controller
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:35 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled down replica set collector-with-high-priority-56d6f9d68 to 0 from 1 deployment-controller
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:36 +0000 UTC Normal Pod collector-with-high-priority-54477c5d7f-nvprw Binding Scheduled Successfully assigned kuttl-test-logical-cheetah/collector-with-high-priority-54477c5d7f-nvprw to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:36 +0000 UTC Normal Pod collector-with-high-priority-54477c5d7f-nvprw AddedInterface Add eth0 [10.128.2.60/23] from ovn-kubernetes
"registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:36 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-54477c5d7f SuccessfulCreate Created pod: collector-with-high-priority-54477c5d7f-nvprw replicaset-controller logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:36 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-54477c5d7f to 1 deployment-controller logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:37 +0000 UTC Normal Pod collector-with-high-priority-54477c5d7f-nvprw.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:37 +0000 UTC Normal Pod collector-with-high-priority-54477c5d7f-nvprw.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:37 +0000 UTC Normal Pod collector-with-high-priority-54477c5d7f-nvprw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:37 +0000 UTC Normal Pod collector-with-high-priority-54477c5d7f-nvprw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:37 +0000 UTC Normal Pod collector-with-high-priority-54477c5d7f-nvprw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:40 +0000 UTC Normal Pod check-span-jn7fb Binding Scheduled Successfully assigned kuttl-test-logical-cheetah/check-span-jn7fb to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:40 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-jn7fb job-controller logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:40 +0000 UTC Normal Pod report-span-dl8s5 Binding Scheduled Successfully assigned kuttl-test-logical-cheetah/report-span-dl8s5 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:40 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-dl8s5 job-controller logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:41 +0000 UTC Normal Pod check-span-jn7fb AddedInterface Add eth0 [10.131.0.49/23] from ovn-kubernetes logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:41 +0000 UTC Normal Pod check-span-jn7fb.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:41 +0000 UTC Normal Pod check-span-jn7fb.spec.containers{asserts-container} Created Created container asserts-container 
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:41 +0000 UTC Normal Pod check-span-jn7fb.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:41 +0000 UTC Normal Pod check-span-jn7fb.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:41 +0000 UTC Normal Pod report-span-dl8s5 AddedInterface Add eth0 [10.129.2.77/23] from ovn-kubernetes
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:41 +0000 UTC Normal Pod report-span-dl8s5.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:41 +0000 UTC Normal Pod report-span-dl8s5.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:41 +0000 UTC Normal Pod report-span-dl8s5.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:05:52 | examples-collector-with-priority-class | 2023-10-09 08:05:52 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:05:52 | examples-collector-with-priority-class | Deleting namespace: kuttl-test-logical-cheetah
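The smoke test that just completed is the pattern every suite here repeats: a report-span Job posts spans to the collector endpoint while a check-span Job polls the query endpoint (through the OAuth proxy, using the mounted e2e-test token) until those spans are visible, and kuttl's assert waits on the Jobs. Run by hand, the flow reduces to roughly this (manifest as rendered by gomplate above; the wait commands are illustrative):

# Apply the rendered manifest containing both Jobs, then block until each
# completes; check-span only succeeds once report-span's spans are queryable.
kubectl apply -f smoke-test-job.yaml -n $NAMESPACE
kubectl wait --for=condition=complete job/report-span -n $NAMESPACE --timeout=300s
kubectl wait --for=condition=complete job/check-span -n $NAMESPACE --timeout=300s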
=== CONT kuttl/harness/examples-agent-with-priority-class
logger.go:42: 08:06:04 | examples-agent-with-priority-class | Creating namespace: kuttl-test-endless-seagull
logger.go:42: 08:06:04 | examples-agent-with-priority-class/0-install | starting test step 0-install
logger.go:42: 08:06:05 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 08:06:05 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-endless-seagull/jaeger-agent-daemonset created
logger.go:42: 08:06:05 | examples-agent-with-priority-class/0-install | test step completed 0-install
logger.go:42: 08:06:05 | examples-agent-with-priority-class/1-install | starting test step 1-install
logger.go:42: 08:06:05 | examples-agent-with-priority-class/1-install | PriorityClass:/high-priority created
logger.go:42: 08:06:05 | examples-agent-with-priority-class/1-install | Jaeger:kuttl-test-endless-seagull/agent-as-daemonset created
logger.go:42: 08:06:11 | examples-agent-with-priority-class/1-install | test step completed 1-install
logger.go:42: 08:06:11 | examples-agent-with-priority-class/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:06:11 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 08:06:12 | examples-agent-with-priority-class/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:06:18 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:06:19 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:06:19 | examples-agent-with-priority-class/2-smoke-test | job.batch/report-span created
logger.go:42: 08:06:19 | examples-agent-with-priority-class/2-smoke-test | job.batch/check-span created
logger.go:42: 08:13:20 | examples-agent-with-priority-class/2-smoke-test | test step failed 2-smoke-test
case.go:364: failed in step 2-smoke-test
case.go:366: --- Job:kuttl-test-endless-seagull/check-span
+++ Job:kuttl-test-endless-seagull/check-span
@@ -1,8 +1,142 @@
apiVersion: batch/v1
kind: Job
metadata:
+  annotations:
+    batch.kubernetes.io/job-tracking: ""
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"check-span","namespace":"kuttl-test-endless-seagull"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./query"],"env":[{"name":"SERVICE_NAME","value":"smoke-test-service"},{"name":"QUERY_HOST","value":"https://agent-as-daemonset-query:443"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: 1692dfab-62dc-4ea7-a47b-531897b336a9
+    batch.kubernetes.io/job-name: check-span
+    controller-uid: 1692dfab-62dc-4ea7-a47b-531897b336a9
+    job-name: check-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:parallelism: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"QUERY_HOST"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2023-10-09T08:06:19Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-10-09T08:11:39Z"
  name: check-span
  namespace: kuttl-test-endless-seagull
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  parallelism: 1
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: 1692dfab-62dc-4ea7-a47b-531897b336a9
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: 1692dfab-62dc-4ea7-a47b-531897b336a9
+        batch.kubernetes.io/job-name: check-span
+        controller-uid: 1692dfab-62dc-4ea7-a47b-531897b336a9
+        job-name: check-span
+    spec:
+      containers:
+      - command:
+        - ./query
+        env:
+        - name: SERVICE_NAME
+          value: smoke-test-service
+        - name: QUERY_HOST
+          value: https://agent-as-daemonset-query:443
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b
+        imagePullPolicy: IfNotPresent
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2023-10-09T08:06:19Z"
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-endless-seagull/check-span: .status.succeeded: key is missing from map
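The failure above is a kuttl assert timing out rather than a command erroring: the expected object (the minus side of the diff) requires .status.succeeded: 1 on the check-span Job, while the live Job still reports active: 1 when the step deadline expires, so kuttl flags the missing key. A sketch of what such an assert file looks like (the file name is hypothetical):

# 02-assert.yaml (hypothetical name): kuttl keeps re-reading the live Job
# until every field listed here matches, or the step times out as it did above.
cat > 02-assert.yaml <<'EOF'
apiVersion: batch/v1
kind: Job
metadata:
  name: check-span
status:
  succeeded: 1
EOF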
logger.go:42: 08:13:20 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns kuttl-test-endless-seagull:
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:08 +0000 UTC Normal Pod agent-as-daemonset-5759fb49-d7srd Binding Scheduled Successfully assigned kuttl-test-endless-seagull/agent-as-daemonset-5759fb49-d7srd to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:08 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5759fb49 SuccessfulCreate Created pod: agent-as-daemonset-5759fb49-d7srd replicaset-controller
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:08 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-5759fb49 to 1 deployment-controller
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:09 +0000 UTC Normal Pod agent-as-daemonset-5759fb49-d7srd AddedInterface Add eth0 [10.128.2.61/23] from ovn-kubernetes
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:09 +0000 UTC Normal Pod agent-as-daemonset-5759fb49-d7srd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:09 +0000 UTC Normal Pod agent-as-daemonset-5759fb49-d7srd.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:09 +0000 UTC Normal Pod agent-as-daemonset-5759fb49-d7srd.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:09 +0000 UTC Normal Pod agent-as-daemonset-5759fb49-d7srd.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:09 +0000 UTC Normal Pod agent-as-daemonset-5759fb49-d7srd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:09 +0000 UTC Normal Pod agent-as-daemonset-5759fb49-d7srd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:10 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
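The FailedCreate event above shows why no agent pods ever appear in this test: the daemonset-with-hostport SCC exists (step 0 created it), but nothing in this case binds it to the jaeger-agent-daemonset service account, so every candidate SCC rejects the agent's hostPorts and the DaemonSet controller cannot create pods. Compare the earlier examples-openshift-agent-as-daemonset case, whose 1-add-policy step performs exactly that grant. A sketch of the two pieces involved (the SCC fields below are a plausible minimal shape, not the test's exact manifest):

# Illustrative minimal SCC that tolerates the agent's hostPorts.
cat <<'EOF' | kubectl create -f -
apiVersion: security.openshift.io/v1
kind: SecurityContextConstraints
metadata:
  name: daemonset-with-hostport
allowHostPorts: true
runAsUser:
  type: RunAsAny
seLinuxContext:
  type: RunAsAny
fsGroup:
  type: RunAsAny
supplementalGroups:
  type: RunAsAny
EOF

# The grant this test never performs (the passing test's 1-add-policy step):
oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset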
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:19 +0000 UTC Normal Pod check-span-4dltd Binding Scheduled Successfully assigned kuttl-test-endless-seagull/check-span-4dltd to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:19 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-4dltd job-controller
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:19 +0000 UTC Normal Pod report-span-wmpjx Binding Scheduled Successfully assigned kuttl-test-endless-seagull/report-span-wmpjx to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:19 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-wmpjx job-controller
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:20 +0000 UTC Normal Pod check-span-4dltd AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:20 +0000 UTC Normal Pod check-span-4dltd.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:20 +0000 UTC Normal Pod check-span-4dltd.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:20 +0000 UTC Normal Pod check-span-4dltd.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:20 +0000 UTC Normal Pod report-span-wmpjx AddedInterface Add eth0 [10.129.2.78/23] from ovn-kubernetes
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:20 +0000 UTC Normal Pod report-span-wmpjx.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:20 +0000 UTC Normal Pod report-span-wmpjx.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:06:20 +0000 UTC Normal Pod report-span-wmpjx.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | 2023-10-09 08:11:25 +0000 UTC Warning Pod check-span-4dltd.spec.containers{asserts-container} BackOff Back-off restarting failed container asserts-container in pod check-span-4dltd_kuttl-test-endless-seagull(5ffd0f91-0e9c-4d84-a75f-1d01ca051a8b) kubelet
logger.go:42: 08:13:20 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-endless-seagull
=== CONT kuttl/harness/examples-agent-as-daemonset
logger.go:42: 08:13:27 | examples-agent-as-daemonset | Creating namespace: kuttl-test-vocal-spider
logger.go:42: 08:13:27 | examples-agent-as-daemonset/0-install | starting test step 0-install
logger.go:42: 08:13:27 | examples-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 08:13:27 | examples-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-vocal-spider/jaeger-agent-daemonset created
logger.go:42: 08:13:27 | examples-agent-as-daemonset/0-install | test step completed 0-install
logger.go:42: 08:13:27 | examples-agent-as-daemonset/1-install | starting test step 1-install
logger.go:42: 08:13:27 | examples-agent-as-daemonset/1-install | Jaeger:kuttl-test-vocal-spider/agent-as-daemonset created
logger.go:42: 08:13:33 | examples-agent-as-daemonset/1-install | test step completed 1-install
logger.go:42: 08:13:33 | examples-agent-as-daemonset/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:13:33 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 08:13:34 | examples-agent-as-daemonset/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:13:41 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:13:42 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:13:43 | examples-agent-as-daemonset/2-smoke-test | job.batch/report-span created
logger.go:42: 08:13:43 | examples-agent-as-daemonset/2-smoke-test | job.batch/check-span created
logger.go:42: 08:13:55 | examples-agent-as-daemonset/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 08:13:55 | examples-agent-as-daemonset | examples-agent-as-daemonset events from ns kuttl-test-vocal-spider:
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:30 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h Binding Scheduled Successfully assigned kuttl-test-vocal-spider/agent-as-daemonset-5c4dcb96d5-jl46h to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:30 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5c4dcb96d5 SuccessfulCreate Created pod: agent-as-daemonset-5c4dcb96d5-jl46h replicaset-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:30 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-5c4dcb96d5 to 1 deployment-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:31 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h AddedInterface Add eth0 [10.128.2.62/23] from ovn-kubernetes
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:31 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:31 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:31 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:31 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:31 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:31 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:32 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:38 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:38 +0000 UTC Normal Pod agent-as-daemonset-5c4dcb96d5-jl46h.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:38 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5c4dcb96d5 SuccessfulDelete Deleted pod: agent-as-daemonset-5c4dcb96d5-jl46h replicaset-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:38 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-5c4dcb96d5 to 0 from 1 deployment-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:39 +0000 UTC Normal Pod agent-as-daemonset-6db589f557-vsjzw Binding Scheduled Successfully assigned kuttl-test-vocal-spider/agent-as-daemonset-6db589f557-vsjzw to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:39 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-6db589f557 SuccessfulCreate Created pod: agent-as-daemonset-6db589f557-vsjzw replicaset-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:39 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-6db589f557 to 1 deployment-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:40 +0000 UTC Normal Pod agent-as-daemonset-6db589f557-vsjzw AddedInterface Add eth0 [10.128.2.63/23] from ovn-kubernetes
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:40 +0000 UTC Normal Pod agent-as-daemonset-6db589f557-vsjzw.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:40 +0000 UTC Normal Pod agent-as-daemonset-6db589f557-vsjzw.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:40 +0000 UTC Normal Pod agent-as-daemonset-6db589f557-vsjzw.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:40 +0000 UTC Normal Pod agent-as-daemonset-6db589f557-vsjzw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:40 +0000 UTC Normal Pod agent-as-daemonset-6db589f557-vsjzw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:40 +0000 UTC Normal Pod agent-as-daemonset-6db589f557-vsjzw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod check-span-4z5gh Binding Scheduled Successfully assigned kuttl-test-vocal-spider/check-span-4z5gh to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod check-span-4z5gh AddedInterface Add eth0 [10.131.0.51/23] from ovn-kubernetes
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod check-span-4z5gh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod check-span-4z5gh.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod check-span-4z5gh.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-4z5gh job-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod report-span-pskp4 Binding Scheduled Successfully assigned kuttl-test-vocal-spider/report-span-pskp4 to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod report-span-pskp4 AddedInterface Add eth0 [10.129.2.79/23] from ovn-kubernetes
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod report-span-pskp4.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod report-span-pskp4.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Pod report-span-pskp4.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:43 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-pskp4 job-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | 2023-10-09 08:13:54 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:13:55 | examples-agent-as-daemonset | Deleting namespace: kuttl-test-vocal-spider
=== CONT kuttl/harness/examples-auto-provision-kafka
logger.go:42: 08:14:02 | examples-auto-provision-kafka | Creating namespace: kuttl-test-amazing-insect
logger.go:42: 08:14:02 | examples-auto-provision-kafka/2-install | starting test step 2-install
logger.go:42: 08:14:02 | examples-auto-provision-kafka/2-install | Jaeger:kuttl-test-amazing-insect/auto-provision-kafka created
logger.go:42: 08:15:08 | examples-auto-provision-kafka/2-install | test step completed 2-install
logger.go:42: 08:15:08 | examples-auto-provision-kafka/3- | starting test step 3-
logger.go:42: 08:15:36 | examples-auto-provision-kafka/3- | test step completed 3-
logger.go:42: 08:15:36 | examples-auto-provision-kafka/4- | starting test step 4-
logger.go:42: 08:15:58 | examples-auto-provision-kafka/4- | test step completed 4-
logger.go:42: 08:15:58 | examples-auto-provision-kafka/5- | starting test step 5-
logger.go:42: 08:16:07 | examples-auto-provision-kafka/5- | test step completed 5-
logger.go:42: 08:16:07 | examples-auto-provision-kafka/6-smoke-test | starting test step 6-smoke-test
logger.go:42: 08:16:07 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provision-kafka /dev/null]
logger.go:42: 08:16:09 | examples-auto-provision-kafka/6-smoke-test | Warning: resource jaegers/auto-provision-kafka is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
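Steps 2 through 5 above create the auto-provision-kafka Jaeger CR and then wait, in turn, for the pieces the operator provisions on its own: the Elasticsearch storage and, through the Strimzi operator's CRDs, a Kafka cluster for the streaming pipeline. A sketch of the kind of CR that triggers this (field values are assumptions in the spirit of the upstream example, not the test's exact manifest):

# Illustrative: a streaming-strategy Jaeger with no external Kafka configured,
# which is what makes the operator auto-provision one alongside the storage.
cat <<'EOF' | kubectl create -n $NAMESPACE -f -
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: auto-provision-kafka
spec:
  strategy: streaming
  storage:
    type: elasticsearch
EOF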
logger.go:42: 08:16:16 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:16:16 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:16:17 | examples-auto-provision-kafka/6-smoke-test | job.batch/report-span created logger.go:42: 08:16:17 | examples-auto-provision-kafka/6-smoke-test | job.batch/check-span created logger.go:42: 08:16:29 | examples-auto-provision-kafka/6-smoke-test | test step completed 6-smoke-test logger.go:42: 08:16:29 | examples-auto-provision-kafka | examples-auto-provision-kafka events from ns kuttl-test-amazing-insect: logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:08 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-5cd869c9b9 to 1 deployment-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:09 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-5cd869c9b9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s replicaset-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s Binding Scheduled Successfully assigned kuttl-test-amazing-insect/elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s AddedInterface Add eth0 [10.128.2.64/23] from ovn-kubernetes logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 
2023-10-09 08:14:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:19 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:24 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestamazinginsectautoprovisionka-1-srg8s.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:37 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-zookeeper NoPods No matching pods found controllermanager logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:37 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:37 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-amazing-insect/data-auto-provision-kafka-zookeeper-0" logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:37 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:40 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-5c2b927e-8b35-4ad0-b2d0-f8cb26388012 logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:41 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-amazing-insect/auto-provision-kafka-zookeeper-0 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:43 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-5c2b927e-8b35-4ad0-b2d0-f8cb26388012" attachdetach-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:47 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 AddedInterface Add eth0 [10.129.2.80/23] from ovn-kubernetes logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:47 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:47 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet 
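A note on the readiness warnings just above: response code 000 is what curl reports when no HTTP response arrives at all, i.e. the connection is refused while Elasticsearch is still bootstrapping, so these events are expected and stop on their own once the node is up. A rough hand-rolled equivalent of such a probe, assuming the in-cluster elasticsearch service name and port used elsewhere in this run:

# poll until Elasticsearch answers HTTP 200; 000 means no response was received yet
until [ "$(curl -s -o /dev/null -w '%{http_code}' http://elasticsearch:9200/)" = "200" ]; do
  echo "Elasticsearch node is not ready to accept HTTP requests yet"
  sleep 5
done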
logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:14:47 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:08 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:09 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-kafka NoPods No matching pods found controllermanager logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:09 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-amazing-insect/data-0-auto-provision-kafka-kafka-0" logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:09 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:12 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-8e841a26-2be4-4536-b57d-564e15775d18 logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:13 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 Binding Scheduled Successfully assigned kuttl-test-amazing-insect/auto-provision-kafka-kafka-0 to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:15 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8e841a26-2be4-4536-b57d-564e15775d18" attachdetach-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:16 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 AddedInterface Add eth0 [10.131.0.52/23] from ovn-kubernetes logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:16 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:16 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:16 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:37 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2 Binding Scheduled Successfully assigned kuttl-test-amazing-insect/auto-provision-kafka-entity-operator-6657f69cc5-7kpt2 to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:37 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2 AddedInterface Add eth0 [10.131.0.53/23] from ovn-kubernetes logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:37 +0000 
UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:37 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:37 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:37 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:37 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-entity-operator-6657f69cc5 SuccessfulCreate Created pod: auto-provision-kafka-entity-operator-6657f69cc5-7kpt2 replicaset-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:37 +0000 UTC Normal Deployment.apps auto-provision-kafka-entity-operator ScalingReplicaSet Scaled up replica set auto-provision-kafka-entity-operator-6657f69cc5 to 1 deployment-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:38 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:38 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:38 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:38 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:15:38 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6657f69cc5-7kpt2.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:01 +0000 UTC Normal Pod auto-provision-kafka-collector-58b846f8c6-4cvzf Binding Scheduled Successfully assigned kuttl-test-amazing-insect/auto-provision-kafka-collector-58b846f8c6-4cvzf to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:01 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-collector-58b846f8c6 SuccessfulCreate Created pod: auto-provision-kafka-collector-58b846f8c6-4cvzf replicaset-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 
08:16:01 +0000 UTC Normal Deployment.apps auto-provision-kafka-collector ScalingReplicaSet Scaled up replica set auto-provision-kafka-collector-58b846f8c6 to 1 deployment-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:01 +0000 UTC Normal Pod auto-provision-kafka-ingester-86c68bd489-jc4bh Binding Scheduled Successfully assigned kuttl-test-amazing-insect/auto-provision-kafka-ingester-86c68bd489-jc4bh to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:01 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-ingester-86c68bd489 SuccessfulCreate Created pod: auto-provision-kafka-ingester-86c68bd489-jc4bh replicaset-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:01 +0000 UTC Normal Deployment.apps auto-provision-kafka-ingester ScalingReplicaSet Scaled up replica set auto-provision-kafka-ingester-86c68bd489 to 1 deployment-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:01 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s Binding Scheduled Successfully assigned kuttl-test-amazing-insect/auto-provision-kafka-query-6b9454c57-k9b2s to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:01 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-6b9454c57 SuccessfulCreate Created pod: auto-provision-kafka-query-6b9454c57-k9b2s replicaset-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:01 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-6b9454c57 to 1 deployment-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-collector-58b846f8c6-4cvzf AddedInterface Add eth0 [10.129.2.81/23] from ovn-kubernetes logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-collector-58b846f8c6-4cvzf.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-collector-58b846f8c6-4cvzf.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-collector-58b846f8c6-4cvzf.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-ingester-86c68bd489-jc4bh AddedInterface Add eth0 [10.131.0.54/23] from ovn-kubernetes logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-ingester-86c68bd489-jc4bh.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s AddedInterface Add eth0 [10.129.2.82/23] from ovn-kubernetes logger.go:42: 
08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:02 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:06 +0000 UTC Normal Pod auto-provision-kafka-ingester-86c68bd489-jc4bh.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" in 4.315577693s (4.315591543s including waiting) kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:06 +0000 UTC Normal Pod auto-provision-kafka-ingester-86c68bd489-jc4bh.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:06 +0000 UTC Normal Pod auto-provision-kafka-ingester-86c68bd489-jc4bh.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:13 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:13 +0000 UTC Normal Pod 
auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:13 +0000 UTC Normal Pod auto-provision-kafka-query-6b9454c57-k9b2s.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:13 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-6b9454c57 SuccessfulDelete Deleted pod: auto-provision-kafka-query-6b9454c57-k9b2s replicaset-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:13 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled down replica set auto-provision-kafka-query-6b9454c57 to 0 from 1 deployment-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:14 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5 Binding Scheduled Successfully assigned kuttl-test-amazing-insect/auto-provision-kafka-query-565bd8bfd9-lz6v5 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:14 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5 AddedInterface Add eth0 [10.129.2.83/23] from ovn-kubernetes logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:14 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:14 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:14 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:14 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:14 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-565bd8bfd9 SuccessfulCreate Created pod: auto-provision-kafka-query-565bd8bfd9-lz6v5 replicaset-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:14 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-565bd8bfd9 to 1 deployment-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:15 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:15 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:15 +0000 UTC Normal Pod 
auto-provision-kafka-query-565bd8bfd9-lz6v5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:15 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:15 +0000 UTC Normal Pod auto-provision-kafka-query-565bd8bfd9-lz6v5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod check-span-xnhqb Binding Scheduled Successfully assigned kuttl-test-amazing-insect/check-span-xnhqb to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod check-span-xnhqb AddedInterface Add eth0 [10.129.2.84/23] from ovn-kubernetes logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod check-span-xnhqb.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod check-span-xnhqb.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod check-span-xnhqb.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-xnhqb job-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod report-span-f5zh6 Binding Scheduled Successfully assigned kuttl-test-amazing-insect/report-span-f5zh6 to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod report-span-f5zh6 AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod report-span-f5zh6.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod report-span-f5zh6.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Pod report-span-f5zh6.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:17 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-f5zh6 job-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get 
cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:16:29 | examples-auto-provision-kafka | 2023-10-09 08:16:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:16:29 | examples-auto-provision-kafka | Deleting namespace: kuttl-test-amazing-insect === CONT kuttl/harness/examples-all-in-one-with-options logger.go:42: 08:16:47 | examples-all-in-one-with-options | Creating namespace: kuttl-test-many-calf logger.go:42: 08:16:47 | examples-all-in-one-with-options/0-install | starting test step 0-install logger.go:42: 08:16:47 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-many-calf/my-jaeger created logger.go:42: 08:16:54 | examples-all-in-one-with-options/0-install | test step completed 0-install logger.go:42: 08:16:54 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test logger.go:42: 08:16:54 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 08:16:56 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
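A note on the FailedGetResourceMetric warnings in the auto-provision-kafka events above: a HorizontalPodAutoscaler can only compute utilization once the resource metrics API has samples for the target pods, and samples lag pod startup by a scrape interval or two, so warnings emitted seconds after the collector and ingester pods start are expected noise rather than failures. A few illustrative commands for inspecting the state if they persist:

# does the HPA see metrics yet? (check the ScalingActive condition and events)
kubectl describe hpa auto-provision-kafka-collector -n kuttl-test-amazing-insect
# is the metrics pipeline serving pod usage at all?
kubectl top pod -n kuttl-test-amazing-insect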
logger.go:42: 08:17:02 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:17:02 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:17:03 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created logger.go:42: 08:17:03 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created logger.go:42: 08:17:15 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test logger.go:42: 08:17:15 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-many-calf: logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:51 +0000 UTC Normal Pod my-jaeger-75ccd5f47-c2cv5 Binding Scheduled Successfully assigned kuttl-test-many-calf/my-jaeger-75ccd5f47-c2cv5 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:51 +0000 UTC Normal ReplicaSet.apps my-jaeger-75ccd5f47 SuccessfulCreate Created pod: my-jaeger-75ccd5f47-c2cv5 replicaset-controller logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:51 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-75ccd5f47 to 1 deployment-controller logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:52 +0000 UTC Normal Pod my-jaeger-75ccd5f47-c2cv5 AddedInterface Add eth0 [10.128.2.66/23] from ovn-kubernetes logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:52 +0000 UTC Normal Pod my-jaeger-75ccd5f47-c2cv5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:52 +0000 UTC Normal Pod my-jaeger-75ccd5f47-c2cv5.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:52 +0000 UTC Normal Pod my-jaeger-75ccd5f47-c2cv5.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:52 +0000 UTC Normal Pod my-jaeger-75ccd5f47-c2cv5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:52 +0000 UTC Normal Pod my-jaeger-75ccd5f47-c2cv5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:16:52 +0000 UTC Normal Pod my-jaeger-75ccd5f47-c2cv5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:00 +0000 UTC Normal Pod 
my-jaeger-75ccd5f47-c2cv5.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:00 +0000 UTC Normal Pod my-jaeger-75ccd5f47-c2cv5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:00 +0000 UTC Normal ReplicaSet.apps my-jaeger-75ccd5f47 SuccessfulDelete Deleted pod: my-jaeger-75ccd5f47-c2cv5 replicaset-controller logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:00 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-75ccd5f47 to 0 from 1 deployment-controller logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal Pod my-jaeger-8fd74b84-4nqdj Binding Scheduled Successfully assigned kuttl-test-many-calf/my-jaeger-8fd74b84-4nqdj to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal Pod my-jaeger-8fd74b84-4nqdj AddedInterface Add eth0 [10.128.2.67/23] from ovn-kubernetes logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal Pod my-jaeger-8fd74b84-4nqdj.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal Pod my-jaeger-8fd74b84-4nqdj.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal Pod my-jaeger-8fd74b84-4nqdj.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal Pod my-jaeger-8fd74b84-4nqdj.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal Pod my-jaeger-8fd74b84-4nqdj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal Pod my-jaeger-8fd74b84-4nqdj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal ReplicaSet.apps my-jaeger-8fd74b84 SuccessfulCreate Created pod: my-jaeger-8fd74b84-4nqdj replicaset-controller logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:01 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-8fd74b84 to 1 deployment-controller logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Pod check-span-wqqwt Binding Scheduled Successfully assigned kuttl-test-many-calf/check-span-wqqwt to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Pod check-span-wqqwt AddedInterface Add eth0 [10.131.0.56/23] from ovn-kubernetes logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC 
Normal Pod check-span-wqqwt.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Pod check-span-wqqwt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Pod check-span-wqqwt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-wqqwt job-controller logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Pod report-span-2jqnv Binding Scheduled Successfully assigned kuttl-test-many-calf/report-span-2jqnv to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Pod report-span-2jqnv AddedInterface Add eth0 [10.129.2.85/23] from ovn-kubernetes logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Pod report-span-2jqnv.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Pod report-span-2jqnv.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Pod report-span-2jqnv.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:03 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2jqnv job-controller logger.go:42: 08:17:15 | examples-all-in-one-with-options | 2023-10-09 08:17:14 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:17:15 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-many-calf === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- FAIL: kuttl (1725.47s) --- FAIL: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.94s) --- PASS: kuttl/harness/examples-service-types (60.06s) --- PASS: kuttl/harness/examples-with-sampling (56.47s) --- PASS: kuttl/harness/examples-with-cassandra (98.43s) --- FAIL: kuttl/harness/examples-with-badger-and-volume (446.63s) --- PASS: kuttl/harness/examples-with-badger (39.61s) --- PASS: kuttl/harness/examples-simplest (39.27s) --- PASS: kuttl/harness/examples-simple-prod-with-volumes (68.51s) --- PASS: kuttl/harness/examples-simple-prod (67.01s) --- PASS: kuttl/harness/examples-business-application-injected-sidecar (48.11s) --- PASS: kuttl/harness/examples-openshift-with-htpasswd (24.01s) --- PASS: kuttl/harness/examples-openshift-agent-as-daemonset (50.52s) --- PASS: kuttl/harness/examples-collector-with-priority-class (38.63s) --- FAIL: kuttl/harness/examples-agent-with-priority-class (442.21s) --- PASS: kuttl/harness/examples-agent-as-daemonset (35.73s) --- PASS: 
kuttl/harness/examples-auto-provision-kafka (164.89s) --- PASS: kuttl/harness/examples-all-in-one-with-options (39.42s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml time="2023-10-09T08:17:28Z" level=debug msg="Setting a new name for the test suites" time="2023-10-09T08:17:28Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-10-09T08:17:28Z" level=debug msg="normalizing test case names" time="2023-10-09T08:17:28Z" level=debug msg="examples/artifacts -> examples_artifacts" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-with-badger-and-volume -> examples_examples_with_badger_and_volume" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-openshift-agent-as-daemonset -> examples_examples_openshift_agent_as_daemonset" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-collector-with-priority-class -> examples_examples_collector_with_priority_class" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-agent-as-daemonset -> examples_examples_agent_as_daemonset" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-auto-provision-kafka -> examples_examples_auto_provision_kafka" time="2023-10-09T08:17:28Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options"
+---------------------------------------------------------+--------+
| NAME                                                    | RESULT |
+---------------------------------------------------------+--------+
| examples_artifacts                                      | passed |
| examples_examples_service_types                         | passed |
| examples_examples_with_sampling                         | passed |
| examples_examples_with_cassandra                        | passed |
| examples_examples_with_badger_and_volume                | failed |
| examples_examples_with_badger                           | passed |
| examples_examples_simplest                              | passed |
| examples_examples_simple_prod_with_volumes              | passed |
| examples_examples_simple_prod                           | passed |
| examples_examples_business_application_injected_sidecar | passed |
| examples_examples_openshift_with_htpasswd               | passed |
| examples_examples_openshift_agent_as_daemonset          | passed |
| examples_examples_collector_with_priority_class         | passed |
| examples_examples_agent_with_priority_class             | failed |
| examples_examples_agent_as_daemonset                    | passed |
| examples_examples_auto_provision_kafka                  | passed |
| examples_examples_all_in_one_with_options               | passed |
+---------------------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 2 -gt 0 ']' + count=1 + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true + '[' 3 -ne 3 ']' + test_suite_name=generate + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/generate.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-generate make[2]: Entering directory '/tmp/jaeger-tests' test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.27.0/operator-sdk_`go env GOOS`_`go env GOARCH` ./hack/install/install-golangci-lint.sh Installing golangci-lint golangci-lint 1.53.2 is installed already ./hack/install/install-goimports.sh Installing goimports Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12 >>>> Formatting code... ./.ci/format.sh >>>> Building... ./hack/install/install-dependencies.sh Installing go dependencies Try 0... go mod download GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.49.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2023-10-09T08:17:30Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.49.0"" -o "bin/jaeger-operator" main.go JAEGER_VERSION="1.49.0" ./tests/e2e/generate/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 51m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 51m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/generate ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 2 -ne 2 ']' + test_name=generate + message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/generate/_build + '[' _build '!=' _build ']' + rm -rf generate + warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m' WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running generate E2E tests' Running generate E2E tests + cd tests/e2e/generate/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1500832312 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 1 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === CONT kuttl/harness/artifacts logger.go:42: 08:17:53 | artifacts | Creating namespace: kuttl-test-eminent-doe logger.go:42: 08:17:53 | artifacts | artifacts events from ns kuttl-test-eminent-doe: logger.go:42: 08:17:53 | artifacts | Deleting namespace: kuttl-test-eminent-doe === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (6.05s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.96s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml time="2023-10-09T08:18:00Z" level=debug msg="Setting a new name for the test suites" time="2023-10-09T08:18:00Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-10-09T08:18:00Z" level=debug msg="normalizing test case names" time="2023-10-09T08:18:00Z" level=debug msg="generate/artifacts -> generate_artifacts"
+--------------------+--------+
| NAME               | RESULT |
+--------------------+--------+
| generate_artifacts | passed |
+--------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 2 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true + '[' 3 -ne 3 ']' + test_suite_name=miscellaneous + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/miscellaneous.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-miscellaneous make[2]: Entering directory '/tmp/jaeger-tests' SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 51m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 51m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/miscellaneous/render.sh ++ export SUITE_DIR=./tests/e2e/miscellaneous ++ SUITE_DIR=./tests/e2e/miscellaneous ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test cassandra-spark 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=cassandra-spark + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf cassandra-spark + warning 'cassandra-spark: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: cassandra-spark: Test not supported in OpenShift\e[0m' WAR: cassandra-spark: Test not supported in OpenShift + start_test collector-autoscale + '[' 1 -ne 1 ']' + test_name=collector-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m' Rendering files for test collector-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p collector-autoscale + cd collector-autoscale + jaeger_name=simple-prod + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + ELASTICSEARCH_NODECOUNT=1 + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml + kubectl api-versions + grep autoscaling/v2beta2 -q + rm ./04-assert.yaml + generate_otlp_e2e_tests http + test_protocol=http + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m' Rendering files for test collector-otlp-allinone-http + echo 
=========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale + '[' collector-autoscale '!=' _build ']' + cd .. + mkdir -p collector-otlp-allinone-http + cd collector-otlp-allinone-http + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger http true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m' Rendering files for test collector-otlp-production-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http + '[' collector-otlp-allinone-http '!=' _build ']' + cd .. 
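A note on the collector-autoscale rendering earlier in this run: the four yq edits patch the generated Jaeger CR in place with a small memory request plus autoscale bounds, which is what makes the operator create a HorizontalPodAutoscaler for the collector. A quick way to confirm what the edits produced (hypothetical command, run inside _build/collector-autoscale; the expected output is an illustrative reconstruction, not copied from the template):

    yq e '.spec.collector' 01-install.yaml
    # Expected output, approximately:
    #   autoscale: true
    #   minReplicas: 1
    #   maxReplicas: 2
    #   resources:
    #     requests:
    #       memory: 200m

The "kubectl api-versions | grep autoscaling/v2beta2" check that ends in "rm ./04-assert.yaml" suggests the rendered v2beta2 HPA assert is dropped on clusters that no longer serve that API version, which is the case here: autoscaling/v2beta2 was removed in Kubernetes 1.26, and OpenShift 4.14 ships a newer Kubernetes.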
+ mkdir -p collector-otlp-production-http + cd collector-otlp-production-http + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger http true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + generate_otlp_e2e_tests grpc + test_protocol=grpc + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m' Rendering files for test collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http + '[' collector-otlp-production-http '!=' _build ']' + cd .. 
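Both HTTP smoke tests above are instances of render_otlp_smoke_test: on this secured OpenShift run the query side is reached through the OAuth-proxied route at https://my-jaeger-query:443, while spans are reported in-cluster to the collector's headless service on 4318, the OTLP/HTTP port. The actual reporter appears to be the repo's assert-jobs program (see REPORTER_PROGRAM above), which is not shown in this log; as a sketch of what an OTLP/HTTP export to that endpoint looks like, a single span can be posted as OTLP/JSON (payload shape per the OTLP spec; the IDs and timestamps below are made up):

    # Hypothetical manual export to the endpoint the test exports above:
    curl -sf -X POST "http://my-jaeger-collector-headless:4318/v1/traces" \
      -H 'Content-Type: application/json' \
      -d '{
        "resourceSpans": [{
          "resource": {"attributes": [{"key": "service.name",
            "value": {"stringValue": "smoke-test"}}]},
          "scopeSpans": [{"spans": [{
            "traceId": "5b8aa5a2d2c872e8321cf37308d69df2",
            "spanId": "5fb397be34d26b51",
            "name": "smoke-span",
            "kind": 2,
            "startTimeUnixNano": "1696838340000000000",
            "endTimeUnixNano": "1696838340100000000"
          }]}]
        }]
      }'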
+ mkdir -p collector-otlp-allinone-grpc + cd collector-otlp-allinone-grpc + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m' Rendering files for test collector-otlp-production-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc + '[' collector-otlp-allinone-grpc '!=' _build ']' + cd .. 
+ mkdir -p collector-otlp-production-grpc + cd collector-otlp-production-grpc + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + '[' true = true ']' + skip_test istio 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=istio + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc + '[' collector-otlp-production-grpc '!=' _build ']' + cd .. 
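The two gRPC variants just rendered differ from their HTTP counterparts only in REPORTING_PROTOCOL and the collector port; the query endpoint stays at https://my-jaeger-query:443. The port choice follows the upstream OTLP defaults, which the render script encodes inline; spelled out as a helper (hypothetical, for clarity only):

    # OTLP listens on 4317 for gRPC and 4318 for HTTP by convention.
    otlp_port() {
      case "$1" in
        grpc) echo 4317 ;;
        http) echo 4318 ;;
        *) echo "unsupported OTLP protocol: $1" >&2; return 1 ;;
      esac
    }
    # OTEL_EXPORTER_OTLP_ENDPOINT="http://my-jaeger-collector-headless:$(otlp_port grpc)"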
+ rm -rf istio + warning 'istio: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m' WAR: istio: Test not supported in OpenShift + '[' true = true ']' + skip_test outside-cluster 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=outside-cluster + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf outside-cluster + warning 'outside-cluster: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m' WAR: outside-cluster: Test not supported in OpenShift + start_test set-custom-img + '[' 1 -ne 1 ']' + test_name=set-custom-img + echo =========================================================================== =========================================================================== + info 'Rendering files for test set-custom-img' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test set-custom-img\e[0m' Rendering files for test set-custom-img + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p set-custom-img + cd set-custom-img + jaeger_name=my-jaeger + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml + '[' true = true ']' + skip_test non-cluster-wide 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=non-cluster-wide + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img + '[' set-custom-img '!=' _build ']' + cd .. + rm -rf non-cluster-wide + warning 'non-cluster-wide: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m' WAR: non-cluster-wide: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running miscellaneous E2E tests' Running miscellaneous E2E tests + cd tests/e2e/miscellaneous/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1500832312 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
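Worth noting how set-custom-img was rendered above: step 01 installs a stock production Jaeger, step 02 is a copy of the same CR with .spec.collector.image forced to "test" (so kuttl applies it as an update), and the verification lives in check-collector-img.sh, which kuttl ignores by filename and the test invokes explicitly. The script itself is not shown in this log; a minimal stand-in for what it must assert could look like (hypothetical sketch):

    # Poll until the operator propagates the overridden image into the
    # collector Deployment; the image never has to be pullable for this.
    expected=test
    until has=$(kubectl get deployment my-jaeger-collector -n "$NAMESPACE" \
          -o jsonpath='{.spec.template.spec.containers[0].image}') \
          && [ "$has" = "$expected" ]; do
      echo "Collector image mismatch. Expected: $expected. Has: $has"
      sleep 5
    done
    echo "Collector image asserted properly!"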
harness.go:275: Successful connection to cluster at: https://api.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 7 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 08:18:11 | artifacts | Creating namespace: kuttl-test-key-dassie logger.go:42: 08:18:11 | artifacts | artifacts events from ns kuttl-test-key-dassie: logger.go:42: 08:18:11 | artifacts | Deleting namespace: kuttl-test-key-dassie === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 08:18:17 | collector-otlp-production-grpc | Creating namespace: kuttl-test-fit-sloth logger.go:42: 08:18:17 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 08:18:17 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-fit-sloth/my-jaeger created logger.go:42: 08:18:54 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 08:18:54 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:18:54 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 08:18:56 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
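The "missing the kubectl.kubernetes.io/last-applied-configuration annotation" warning above is expected noise rather than a failure: kuttl created the Jaeger CR with a plain create, and get-token.sh evidently patches it with kubectl apply, so kubectl back-fills the annotation on first apply. Creating the resource with --save-config (or applying it from the start) would avoid the message:

    # Either form records last-applied-configuration up front (illustrative;
    # in these tests kuttl itself creates the CR):
    kubectl create -f 01-install.yaml --save-config -n "$NAMESPACE"
    kubectl apply -f 01-install.yaml -n "$NAMESPACE"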
logger.go:42: 08:19:03 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 08:19:03 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:19:03 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 08:19:03 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 08:19:24 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 08:19:24 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-fit-sloth: logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5 Binding Scheduled Successfully assigned kuttl-test-fit-sloth/elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5 AddedInterface Add eth0 [10.128.2.68/23] from ovn-kubernetes logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf SuccessfulCreate 
Created pod: elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5 replicaset-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:23 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestfitslothmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf to 1 deployment-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:33 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:38 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfitslothmyjaeger-1-59dbd9cdbf-mvjw5.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-qxwct Binding Scheduled Successfully assigned kuttl-test-fit-sloth/my-jaeger-collector-558ccfc8dd-qxwct to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-qxwct AddedInterface Add eth0 [10.131.0.57/23] from ovn-kubernetes logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-qxwct.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-qxwct replicaset-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p Binding Scheduled Successfully assigned kuttl-test-fit-sloth/my-jaeger-query-68fcddff9-dm76p to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p AddedInterface Add eth0 [10.129.2.86/23] from ovn-kubernetes logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod 
my-jaeger-query-68fcddff9-dm76p.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-68fcddff9 SuccessfulCreate Created pod: my-jaeger-query-68fcddff9-dm76p replicaset-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:50 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-68fcddff9 to 1 deployment-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:52 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-qxwct.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" in 1.816200459s (1.81622061s including waiting) kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:52 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-qxwct.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:52 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-qxwct.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:57 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-68fcddff9 SuccessfulDelete Deleted pod: my-jaeger-query-68fcddff9-dm76p replicaset-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:57 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-68fcddff9 to 0 from 1 deployment-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:58 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:58 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 
08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:58 +0000 UTC Normal Pod my-jaeger-query-68fcddff9-dm76p.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:58 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln Binding Scheduled Successfully assigned kuttl-test-fit-sloth/my-jaeger-query-7995c95697-kx7ln to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:58 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7995c95697 SuccessfulCreate Created pod: my-jaeger-query-7995c95697-kx7ln replicaset-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:58 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7995c95697 to 1 deployment-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln AddedInterface Add eth0 [10.129.2.87/23] from ovn-kubernetes logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:18:59 +0000 UTC Normal Pod my-jaeger-query-7995c95697-kx7ln.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:03 +0000 UTC Normal Pod check-span-cl2vs Binding Scheduled Successfully assigned 
kuttl-test-fit-sloth/check-span-cl2vs to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:03 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-cl2vs job-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:03 +0000 UTC Normal Pod report-span-sjh9r Binding Scheduled Successfully assigned kuttl-test-fit-sloth/report-span-sjh9r to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:03 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-sjh9r job-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:04 +0000 UTC Normal Pod check-span-cl2vs AddedInterface Add eth0 [10.131.0.59/23] from ovn-kubernetes logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:04 +0000 UTC Normal Pod check-span-cl2vs.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:04 +0000 UTC Normal Pod check-span-cl2vs.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:04 +0000 UTC Normal Pod check-span-cl2vs.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:04 +0000 UTC Normal Pod report-span-sjh9r AddedInterface Add eth0 [10.131.0.58/23] from ovn-kubernetes logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:04 +0000 UTC Normal Pod report-span-sjh9r.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:04 +0000 UTC Normal Pod report-span-sjh9r.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:04 +0000 UTC Normal Pod report-span-sjh9r.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:05 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:05 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:05 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API 
horizontal-pod-autoscaler logger.go:42: 08:19:24 | collector-otlp-production-grpc | 2023-10-09 08:19:23 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:19:24 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-fit-sloth === CONT kuttl/harness/set-custom-img logger.go:42: 08:19:36 | set-custom-img | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:19:36 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:19:36 | set-custom-img | Creating namespace: kuttl-test-secure-reptile logger.go:42: 08:19:36 | set-custom-img/1-install | starting test step 1-install logger.go:42: 08:19:36 | set-custom-img/1-install | Jaeger:kuttl-test-secure-reptile/my-jaeger created logger.go:42: 08:20:12 | set-custom-img/1-install | test step completed 1-install logger.go:42: 08:20:12 | set-custom-img/2-install | starting test step 2-install logger.go:42: 08:20:12 | set-custom-img/2-install | Jaeger:kuttl-test-secure-reptile/my-jaeger updated logger.go:42: 08:20:12 | set-custom-img/2-install | test step completed 2-install logger.go:42: 08:20:12 | set-custom-img/3-check-image | starting test step 3-check-image logger.go:42: 08:20:12 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh] logger.go:42: 08:20:12 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c logger.go:42: 08:20:17 | set-custom-img/3-check-image | Collector image asserted properly! logger.go:42: 08:20:17 | set-custom-img/3-check-image | test step completed 3-check-image logger.go:42: 08:20:17 | set-custom-img | set-custom-img events from ns kuttl-test-secure-reptile: logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49ff SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm replicaset-controller logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm Binding Scheduled Successfully assigned kuttl-test-secure-reptile/elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm AddedInterface Add eth0 [10.128.2.69/23] from ovn-kubernetes logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 
08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:42 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49ff to 1 deployment-controller logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:52 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:19:57 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsecurereptilemyjaeger-1-7ffb49fvzdwm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-tztrv Binding Scheduled Successfully assigned kuttl-test-secure-reptile/my-jaeger-collector-558ccfc8dd-tztrv to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-tztrv AddedInterface Add eth0 [10.131.0.60/23] from ovn-kubernetes logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-tztrv.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-tztrv.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-tztrv.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-tztrv replicaset-controller logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll Binding Scheduled Successfully assigned kuttl-test-secure-reptile/my-jaeger-query-74c87dc66-ft5ll to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Pod 
my-jaeger-query-74c87dc66-ft5ll AddedInterface Add eth0 [10.129.2.88/23] from ovn-kubernetes logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-74c87dc66 SuccessfulCreate Created pod: my-jaeger-query-74c87dc66-ft5ll replicaset-controller logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:09 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-74c87dc66 to 1 deployment-controller logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:10 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:10 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:10 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:10 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:10 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:10 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:10 +0000 UTC Normal Pod my-jaeger-query-74c87dc66-ft5ll.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:16 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-tztrv.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:16 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulDelete Deleted pod: my-jaeger-collector-558ccfc8dd-tztrv replicaset-controller logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:16 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-558ccfc8dd to 0 from 1 deployment-controller logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:17 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-vdgjn Binding Scheduled Successfully assigned kuttl-test-secure-reptile/my-jaeger-collector-6755b759f8-vdgjn to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:20:17 
| set-custom-img | 2023-10-09 08:20:17 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-vdgjn AddedInterface Add eth0 [10.131.0.61/23] from ovn-kubernetes logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:17 +0000 UTC Normal Pod my-jaeger-collector-6755b759f8-vdgjn.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:17 +0000 UTC Warning Pod my-jaeger-collector-6755b759f8-vdgjn.spec.containers{jaeger-collector} Failed Failed to pull image "test": rpc error: code = Unknown desc = reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:17 +0000 UTC Warning Pod my-jaeger-collector-6755b759f8-vdgjn.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:17 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-6755b759f8 SuccessfulCreate Created pod: my-jaeger-collector-6755b759f8-vdgjn replicaset-controller logger.go:42: 08:20:17 | set-custom-img | 2023-10-09 08:20:17 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-6755b759f8 to 1 deployment-controller logger.go:42: 08:20:17 | set-custom-img | Deleting namespace: kuttl-test-secure-reptile === CONT kuttl/harness/collector-otlp-production-http logger.go:42: 08:20:24 | collector-otlp-production-http | Creating namespace: kuttl-test-cheerful-piglet logger.go:42: 08:20:24 | collector-otlp-production-http/1-install | starting test step 1-install logger.go:42: 08:20:24 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-cheerful-piglet/my-jaeger created logger.go:42: 08:21:01 | collector-otlp-production-http/1-install | test step completed 1-install logger.go:42: 08:21:01 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:21:01 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 08:21:03 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
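The ErrImagePull above is the expected outcome of the "test" override, and the smoke-test step that follows repeats the pattern seen in the gRPC run: gomplate renders a Job pair from otlp-smoke-test.yaml.template, report-span pushes spans to the collector's OTLP endpoint, check-span polls the secured query API until they show up, and the step's kuttl assert then waits for both Jobs to complete. Done by hand, that wait is just:

    # Manual equivalent of the kuttl assert for the smoke-test step:
    kubectl wait --for=condition=complete job/report-span -n "$NAMESPACE" --timeout=300s
    kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=300s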
logger.go:42: 08:21:09 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 08:21:10 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:21:10 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created logger.go:42: 08:21:10 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created logger.go:42: 08:21:22 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test logger.go:42: 08:21:22 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-cheerful-piglet: logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:31 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c68d9c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v replicaset-controller logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v Binding Scheduled Successfully assigned kuttl-test-cheerful-piglet/elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v AddedInterface Add eth0 [10.128.2.70/23] from ovn-kubernetes logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:31 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c68d9c to 1 deployment-controller logger.go:42: 08:21:22 | 
collector-otlp-production-http | 2023-10-09 08:20:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:41 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:47 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcheerfulpigletmyjaeger-1-7bc8c6bkl8v.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2xcbq Binding Scheduled Successfully assigned kuttl-test-cheerful-piglet/my-jaeger-collector-558ccfc8dd-2xcbq to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2xcbq AddedInterface Add eth0 [10.131.0.62/23] from ovn-kubernetes
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2xcbq.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2xcbq.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-collector-558ccfc8dd-2xcbq.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-558ccfc8dd SuccessfulCreate Created pod: my-jaeger-collector-558ccfc8dd-2xcbq replicaset-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-558ccfc8dd to 1 deployment-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp Binding Scheduled Successfully assigned kuttl-test-cheerful-piglet/my-jaeger-query-df989b99d-2fgtp to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp AddedInterface Add eth0 [10.129.2.89/23] from ovn-kubernetes
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-df989b99d SuccessfulCreate Created pod: my-jaeger-query-df989b99d-2fgtp replicaset-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:58 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-df989b99d to 1 deployment-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:59 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:59 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:59 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:59 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:20:59 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:05 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:05 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:05 +0000 UTC Normal Pod my-jaeger-query-df989b99d-2fgtp.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:05 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-df989b99d SuccessfulDelete Deleted pod: my-jaeger-query-df989b99d-2fgtp replicaset-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:05 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-df989b99d to 0 from 1 deployment-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt Binding Scheduled Successfully assigned kuttl-test-cheerful-piglet/my-jaeger-query-7878b7474d-2jxvt to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt AddedInterface Add eth0 [10.129.2.90/23] from ovn-kubernetes
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Pod my-jaeger-query-7878b7474d-2jxvt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7878b7474d SuccessfulCreate Created pod: my-jaeger-query-7878b7474d-2jxvt replicaset-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:06 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7878b7474d to 1 deployment-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:10 +0000 UTC Normal Pod check-span-vgj5d Binding Scheduled Successfully assigned kuttl-test-cheerful-piglet/check-span-vgj5d to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:10 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-vgj5d job-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:10 +0000 UTC Normal Pod report-span-775bc Binding Scheduled Successfully assigned kuttl-test-cheerful-piglet/report-span-775bc to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:10 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-775bc job-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:11 +0000 UTC Normal Pod check-span-vgj5d AddedInterface Add eth0 [10.131.0.64/23] from ovn-kubernetes
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:11 +0000 UTC Normal Pod check-span-vgj5d.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:11 +0000 UTC Normal Pod check-span-vgj5d.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:11 +0000 UTC Normal Pod check-span-vgj5d.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:11 +0000 UTC Normal Pod report-span-775bc AddedInterface Add eth0 [10.131.0.63/23] from ovn-kubernetes
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:11 +0000 UTC Normal Pod report-span-775bc.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:11 +0000 UTC Normal Pod report-span-775bc.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:11 +0000 UTC Normal Pod report-span-775bc.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:21:22 | collector-otlp-production-http | 2023-10-09 08:21:22 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:21:22 | collector-otlp-production-http | Deleting namespace: kuttl-test-cheerful-piglet
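The three HorizontalPodAutoscaler warnings a few lines up are expected noise at this point in the run: the collector pod had been running for only a few seconds, so the resource metrics API had no cpu or memory samples for it yet and the HPA could not compute utilization. If you need to watch an HPA recover from this state, standard kubectl is enough (a minimal sketch, not part of the test suite; substitute the real test namespace):

# TARGETS stays <unknown> until the first metrics scrape succeeds
kubectl get hpa my-jaeger-collector -n <test-namespace>
# Events repeat FailedGetResourceMetric until then and stop once samples arrive
kubectl describe hpa my-jaeger-collector -n <test-namespace>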
=== CONT kuttl/harness/collector-otlp-allinone-grpc
logger.go:42: 08:21:34 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-accepted-locust
logger.go:42: 08:21:34 | collector-otlp-allinone-grpc/0-install | starting test step 0-install
logger.go:42: 08:21:35 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-accepted-locust/my-jaeger created
logger.go:42: 08:21:41 | collector-otlp-allinone-grpc/0-install | test step completed 0-install
logger.go:42: 08:21:41 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:21:41 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:21:42 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:21:48 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 08:21:49 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:21:49 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created
logger.go:42: 08:21:49 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test
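The 1-smoke-test step above is self-contained enough to rerun by hand: gomplate renders otlp-smoke-test.yaml.template into two Jobs, report-span (pushes spans to the collector over OTLP gRPC on port 4317; the HTTP flavour of this test uses 4318 instead) and check-span (polls the query API until the span is found). A manual equivalent, a sketch reusing the exact values logged above:

REPORTING_PROTOCOL=grpc \
ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b \
OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 \
JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 \
MOUNT_SECRET=e2e-test \
/tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml
kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE
# a hand-run stand-in for what the kuttl assert checks:
kubectl wait --for=condition=complete job/check-span -n $NAMESPACE --timeout=2m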
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-accepted-locust:
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:38 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k Binding Scheduled Successfully assigned kuttl-test-accepted-locust/my-jaeger-767dd9c99c-km29k to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:38 +0000 UTC Normal ReplicaSet.apps my-jaeger-767dd9c99c SuccessfulCreate Created pod: my-jaeger-767dd9c99c-km29k replicaset-controller
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:38 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-767dd9c99c to 1 deployment-controller
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:39 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k AddedInterface Add eth0 [10.128.2.71/23] from ovn-kubernetes
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:39 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:39 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:39 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:39 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:39 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:39 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:45 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:45 +0000 UTC Normal Pod my-jaeger-767dd9c99c-km29k.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:45 +0000 UTC Normal ReplicaSet.apps my-jaeger-767dd9c99c SuccessfulDelete Deleted pod: my-jaeger-767dd9c99c-km29k replicaset-controller
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:45 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-767dd9c99c to 0 from 1 deployment-controller
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:46 +0000 UTC Normal Pod my-jaeger-74fc7f8588-vvfzb Binding Scheduled Successfully assigned kuttl-test-accepted-locust/my-jaeger-74fc7f8588-vvfzb to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:46 +0000 UTC Normal ReplicaSet.apps my-jaeger-74fc7f8588 SuccessfulCreate Created pod: my-jaeger-74fc7f8588-vvfzb replicaset-controller
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:46 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-74fc7f8588 to 1 deployment-controller
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:47 +0000 UTC Normal Pod my-jaeger-74fc7f8588-vvfzb AddedInterface Add eth0 [10.128.2.72/23] from ovn-kubernetes
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:47 +0000 UTC Normal Pod my-jaeger-74fc7f8588-vvfzb.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:47 +0000 UTC Normal Pod my-jaeger-74fc7f8588-vvfzb.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:47 +0000 UTC Normal Pod my-jaeger-74fc7f8588-vvfzb.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:47 +0000 UTC Normal Pod my-jaeger-74fc7f8588-vvfzb.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:47 +0000 UTC Normal Pod my-jaeger-74fc7f8588-vvfzb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:47 +0000 UTC Normal Pod my-jaeger-74fc7f8588-vvfzb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:49 +0000 UTC Normal Pod check-span-g7xsf Binding Scheduled Successfully assigned kuttl-test-accepted-locust/check-span-g7xsf to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:49 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-g7xsf job-controller
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:49 +0000 UTC Normal Pod report-span-qmb7w Binding Scheduled Successfully assigned kuttl-test-accepted-locust/report-span-qmb7w to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:49 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-qmb7w job-controller
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:50 +0000 UTC Normal Pod check-span-g7xsf AddedInterface Add eth0 [10.131.0.65/23] from ovn-kubernetes
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:50 +0000 UTC Normal Pod check-span-g7xsf.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:50 +0000 UTC Normal Pod check-span-g7xsf.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:50 +0000 UTC Normal Pod check-span-g7xsf.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:50 +0000 UTC Normal Pod report-span-qmb7w AddedInterface Add eth0 [10.129.2.91/23] from ovn-kubernetes
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:50 +0000 UTC Normal Pod report-span-qmb7w.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:50 +0000 UTC Normal Pod report-span-qmb7w.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:21:50 +0000 UTC Normal Pod report-span-qmb7w.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | 2023-10-09 08:22:08 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:22:08 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-accepted-locust
=== CONT kuttl/harness/collector-otlp-allinone-http
logger.go:42: 08:22:20 | collector-otlp-allinone-http | Creating namespace: kuttl-test-golden-halibut
logger.go:42: 08:22:20 | collector-otlp-allinone-http/0-install | starting test step 0-install
logger.go:42: 08:22:20 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-golden-halibut/my-jaeger created
logger.go:42: 08:22:26 | collector-otlp-allinone-http/0-install | test step completed 0-install
logger.go:42: 08:22:26 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:22:26 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:22:28 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:22:34 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 08:22:34 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:22:35 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created
logger.go:42: 08:22:35 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created
logger.go:42: 08:22:47 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:22:47 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-golden-halibut:
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:24 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5 Binding Scheduled Successfully assigned kuttl-test-golden-halibut/my-jaeger-688b4fc7d7-sr5c5 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:24 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5 AddedInterface Add eth0 [10.128.2.73/23] from ovn-kubernetes
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:24 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:24 +0000 UTC Normal ReplicaSet.apps my-jaeger-688b4fc7d7 SuccessfulCreate Created pod: my-jaeger-688b4fc7d7-sr5c5 replicaset-controller
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:24 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-688b4fc7d7 to 1 deployment-controller
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:25 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:25 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:25 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:25 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:25 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:29 +0000 UTC Normal Pod my-jaeger-5c699f44d9-mtvg9 Binding Scheduled Successfully assigned kuttl-test-golden-halibut/my-jaeger-5c699f44d9-mtvg9 to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:29 +0000 UTC Normal ReplicaSet.apps my-jaeger-5c699f44d9 SuccessfulCreate Created pod: my-jaeger-5c699f44d9-mtvg9 replicaset-controller
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:29 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:29 +0000 UTC Normal Pod my-jaeger-688b4fc7d7-sr5c5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:29 +0000 UTC Normal ReplicaSet.apps my-jaeger-688b4fc7d7 SuccessfulDelete Deleted pod: my-jaeger-688b4fc7d7-sr5c5 replicaset-controller
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:29 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-688b4fc7d7 to 0 from 1 deployment-controller
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:29 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-5c699f44d9 to 1 deployment-controller
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:30 +0000 UTC Normal Pod my-jaeger-5c699f44d9-mtvg9 AddedInterface Add eth0 [10.128.2.74/23] from ovn-kubernetes
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:30 +0000 UTC Normal Pod my-jaeger-5c699f44d9-mtvg9.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:30 +0000 UTC Normal Pod my-jaeger-5c699f44d9-mtvg9.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:30 +0000 UTC Normal Pod my-jaeger-5c699f44d9-mtvg9.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:30 +0000 UTC Normal Pod my-jaeger-5c699f44d9-mtvg9.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:30 +0000 UTC Normal Pod my-jaeger-5c699f44d9-mtvg9.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:30 +0000 UTC Normal Pod my-jaeger-5c699f44d9-mtvg9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod check-span-55sgx Binding Scheduled Successfully assigned kuttl-test-golden-halibut/check-span-55sgx to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod check-span-55sgx AddedInterface Add eth0 [10.131.0.66/23] from ovn-kubernetes
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod check-span-55sgx.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod check-span-55sgx.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod check-span-55sgx.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-55sgx job-controller
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod report-span-4tn5t Binding Scheduled Successfully assigned kuttl-test-golden-halibut/report-span-4tn5t to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod report-span-4tn5t AddedInterface Add eth0 [10.129.2.92/23] from ovn-kubernetes
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod report-span-4tn5t.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod report-span-4tn5t.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Pod report-span-4tn5t.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:35 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-4tn5t job-controller
logger.go:42: 08:22:47 | collector-otlp-allinone-http | 2023-10-09 08:22:46 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:22:47 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-golden-halibut
=== CONT kuttl/harness/collector-autoscale
logger.go:42: 08:22:59 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:22:59 | collector-autoscale | Ignoring wait-for-hpa.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:22:59 | collector-autoscale | Creating namespace: kuttl-test-busy-moose
logger.go:42: 08:22:59 | collector-autoscale/1-install | starting test step 1-install
logger.go:42: 08:22:59 | collector-autoscale/1-install | Jaeger:kuttl-test-busy-moose/simple-prod created
logger.go:42: 08:23:35 | collector-autoscale/1-install | test step completed 1-install
logger.go:42: 08:23:35 | collector-autoscale/2-wait-for-hpa | starting test step 2-wait-for-hpa
logger.go:42: 08:23:35 | collector-autoscale/2-wait-for-hpa | running command: [sh -c ./wait-for-hpa.sh]
logger.go:42: 08:23:35 | collector-autoscale/2-wait-for-hpa | Some HPA metrics are not known yet
logger.go:42: 08:23:36 | collector-autoscale/2-wait-for-hpa | test step completed 2-wait-for-hpa
logger.go:42: 08:23:36 | collector-autoscale/3- | starting test step 3-
logger.go:42: 08:23:36 | collector-autoscale/3- | test step completed 3-
logger.go:42: 08:23:36 | collector-autoscale | collector-autoscale events from ns kuttl-test-busy-moose:
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc replicaset-controller
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc Binding Scheduled Successfully assigned kuttl-test-busy-moose/elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc to ip-10-0-21-71.ec2.internal default-scheduler
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc AddedInterface Add eth0 [10.128.2.75/23] from ovn-kubernetes
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:05 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestbusymoosesimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564c to 1 deployment-controller
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:15 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:20 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbusymoosesimpleprod-1-c7657564crqzlc.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-79fnt Binding Scheduled Successfully assigned kuttl-test-busy-moose/simple-prod-collector-b86d94b64-79fnt to ip-10-0-98-173.ec2.internal default-scheduler
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-79fnt AddedInterface Add eth0 [10.129.2.93/23] from ovn-kubernetes
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-79fnt.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-79fnt.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-collector-b86d94b64-79fnt.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-b86d94b64 SuccessfulCreate Created pod: simple-prod-collector-b86d94b64-79fnt replicaset-controller
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-b86d94b64 to 1 deployment-controller
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn Binding Scheduled Successfully assigned kuttl-test-busy-moose/simple-prod-query-55b97f986c-hwtqn to ip-10-0-31-95.ec2.internal default-scheduler
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn AddedInterface Add eth0 [10.131.0.67/23] from ovn-kubernetes
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Pod simple-prod-query-55b97f986c-hwtqn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal ReplicaSet.apps simple-prod-query-55b97f986c SuccessfulCreate Created pod: simple-prod-query-55b97f986c-hwtqn replicaset-controller
logger.go:42: 08:23:36 | collector-autoscale | 2023-10-09 08:23:32 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-55b97f986c to 1 deployment-controller
logger.go:42: 08:23:36 | collector-autoscale | Deleting namespace: kuttl-test-busy-moose
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (331.42s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.98s)
--- PASS: kuttl/harness/collector-otlp-production-grpc (78.71s)
--- PASS: kuttl/harness/set-custom-img (47.93s)
--- PASS: kuttl/harness/collector-otlp-production-http (70.77s)
--- PASS: kuttl/harness/collector-otlp-allinone-grpc (45.93s)
--- PASS: kuttl/harness/collector-otlp-allinone-http (38.45s)
--- PASS: kuttl/harness/collector-autoscale (43.61s)
PASS
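Note the "Ignoring README.md" and "Ignoring wait-for-hpa.sh" lines at the start of the collector-autoscale test above: kuttl only treats files matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ as test steps, i.e. a numeric step index, a dash, a dot-free name, and an optional .yaml suffix. A rough shell equivalent of that filter, with POSIX [0-9] standing in for \d (a sketch, run inside a test directory):

# lists only the step files (e.g. 1-install.yaml, 2-wait-for-hpa)
ls | grep -E '^[0-9]+-[^.]+(\.yaml)?$'
# README.md and wait-for-hpa.sh fail the match, so kuttl skips them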
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml
time="2023-10-09T08:23:43Z" level=debug msg="Setting a new name for the test suites"
time="2023-10-09T08:23:43Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-10-09T08:23:43Z" level=debug msg="normalizing test case names"
time="2023-10-09T08:23:43Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts"
time="2023-10-09T08:23:43Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc"
time="2023-10-09T08:23:43Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img"
time="2023-10-09T08:23:43Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http"
time="2023-10-09T08:23:43Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc"
time="2023-10-09T08:23:43Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http"
time="2023-10-09T08:23:43Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale"
+----------------------------------------------+--------+
| NAME                                         | RESULT |
+----------------------------------------------+--------+
| miscellaneous_artifacts                      | passed |
| miscellaneous_collector_otlp_production_grpc | passed |
| miscellaneous_set_custom_img                 | passed |
| miscellaneous_collector_otlp_production_http | passed |
| miscellaneous_collector_otlp_allinone_grpc   | passed |
| miscellaneous_collector_otlp_allinone_http   | passed |
| miscellaneous_collector_autoscale            | passed |
+----------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 2 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ '[' 1 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=sidecar
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/sidecar.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-sidecar
make[2]: Entering directory '/tmp/jaeger-tests'
./tests/e2e/sidecar/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.14.0-0.nightly-2023-10-06-234925 True False 57m Cluster version is 4.14.0-0.nightly-2023-10-06-234925'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.14.0-0.nightly-2023-10-06-234925 True False 57m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/sidecar/render.sh
++ export SUITE_DIR=./tests/e2e/sidecar
++ SUITE_DIR=./tests/e2e/sidecar
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/sidecar
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
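The ++ trace above also shows how render.sh decides what it is targeting: it runs kubectl get clusterversion, and a non-empty result means OpenShift, since the ClusterVersion resource only exists there. Stripped of the xtrace noise, the detection amounts to something like this (a sketch; the stderr redirection is an assumption, not visible in the trace):

IS_OPENSHIFT=false
output=$(kubectl get clusterversion 2>/dev/null)
if [ ! -z "$output" ]; then
  # this branch also prints the 'WAR: Generating templates for an OpenShift cluster' warning seen above
  IS_OPENSHIFT=true
fi
export IS_OPENSHIFT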
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ jaeger_service_name=order
+ start_test sidecar-deployment
+ '[' 1 -ne 1 ']'
+ test_name=sidecar-deployment
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test sidecar-deployment'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m'
Rendering files for test sidecar-deployment
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build
+ '[' _build '!=' _build ']'
+ mkdir -p sidecar-deployment
+ cd sidecar-deployment
+ render_install_vertx 01
+ '[' 1 -ne 1 ']'
+ test_step=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml
+ render_find_service agent-as-sidecar allInOne order 00 03
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-sidecar
+ deployment_strategy=allInOne
+ service_name=order
+ job_number=00
+ test_step=03
+ export JAEGER_NAME=agent-as-sidecar
+ JAEGER_NAME=agent-as-sidecar
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' allInOne '!=' allInOne ']'
+ template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ render_find_service agent-as-sidecar2 allInOne order 01 06
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-sidecar2
+ deployment_strategy=allInOne
+ service_name=order
+ job_number=01
+ test_step=06
+ export JAEGER_NAME=agent-as-sidecar2
+ JAEGER_NAME=agent-as-sidecar2
+ export JOB_NUMBER=01
+ JOB_NUMBER=01
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' allInOne '!=' allInOne ']'
+ template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test sidecar-namespace
+ '[' 1 -ne 1 ']'
+ test_name=sidecar-namespace
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test sidecar-namespace'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m'
Rendering files for test sidecar-namespace
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment
+ '[' sidecar-deployment '!=' _build ']'
+ cd ..
+ mkdir -p sidecar-namespace
+ cd sidecar-namespace
+ render_install_vertx 01
+ '[' 1 -ne 1 ']'
+ test_step=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml
+ render_find_service agent-as-sidecar allInOne order 00 03
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-sidecar
+ deployment_strategy=allInOne
+ service_name=order
+ job_number=00
+ test_step=03
+ export JAEGER_NAME=agent-as-sidecar
+ JAEGER_NAME=agent-as-sidecar
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' allInOne '!=' allInOne ']'
+ template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ render_find_service agent-as-sidecar2 allInOne order 01 06
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-sidecar2
+ deployment_strategy=allInOne
+ service_name=order
+ job_number=01
+ test_step=06
+ export JAEGER_NAME=agent-as-sidecar2
+ JAEGER_NAME=agent-as-sidecar2
+ export JOB_NUMBER=01
+ JOB_NUMBER=01
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' allInOne '!=' allInOne ']'
+ template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test sidecar-skip-webhook
+ '[' 1 -ne 1 ']'
+ test_name=sidecar-skip-webhook
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test sidecar-skip-webhook'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m'
Rendering files for test sidecar-skip-webhook
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace
+ '[' sidecar-namespace '!=' _build ']'
+ cd ..
+ mkdir -p sidecar-skip-webhook
+ cd sidecar-skip-webhook
+ render_install_vertx 01
+ '[' 1 -ne 1 ']'
+ test_step=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running sidecar E2E tests'
Running sidecar E2E tests
+ cd tests/e2e/sidecar/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-1500832312
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 4 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/sidecar-deployment
=== PAUSE kuttl/harness/sidecar-deployment
=== RUN kuttl/harness/sidecar-namespace
=== PAUSE kuttl/harness/sidecar-namespace
=== RUN kuttl/harness/sidecar-skip-webhook
=== PAUSE kuttl/harness/sidecar-skip-webhook
=== CONT kuttl/harness/artifacts
logger.go:42: 08:23:50 | artifacts | Creating namespace: kuttl-test-hot-jaguar
logger.go:42: 08:23:50 | artifacts | artifacts events from ns kuttl-test-hot-jaguar:
logger.go:42: 08:23:50 | artifacts | Deleting namespace: kuttl-test-hot-jaguar
=== CONT kuttl/harness/sidecar-namespace
logger.go:42: 08:23:56 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:23:56 | sidecar-namespace | Creating namespace: kuttl-test-game-racer
logger.go:42: 08:23:56 | sidecar-namespace/0-install | starting test step 0-install
logger.go:42: 08:23:56 | sidecar-namespace/0-install | Jaeger:kuttl-test-game-racer/agent-as-sidecar created
logger.go:42: 08:24:01 | sidecar-namespace/0-install | test step completed 0-install
logger.go:42: 08:24:01 | sidecar-namespace/1-install | starting test step 1-install
logger.go:42: 08:24:01 | sidecar-namespace/1-install | Deployment:kuttl-test-game-racer/vertx-create-span-sidecar created
logger.go:42: 08:24:03 | sidecar-namespace/1-install | test step completed 1-install
logger.go:42: 08:24:03 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection
logger.go:42: 08:24:03 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"]
logger.go:42: 08:24:04 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-game-racer annotated
logger.go:42: 08:24:06 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection
logger.go:42: 08:24:06 | sidecar-namespace/3-find-service | starting test step 3-find-service
logger.go:42: 08:24:06 | sidecar-namespace/3-find-service | Job:kuttl-test-game-racer/00-find-service created
logger.go:42: 08:24:18 | sidecar-namespace/3-find-service | test step completed 3-find-service
logger.go:42: 08:24:18 | sidecar-namespace/4-other-instance | starting test step 4-other-instance
logger.go:42: 08:24:18 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-game-racer/agent-as-sidecar2 created
logger.go:42: 08:24:26 | sidecar-namespace/4-other-instance | test step completed 4-other-instance
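Steps 2-enable-injection (above) and 7-disable-injection (below) exercise the operator's namespace-level sidecar injection: flipping the sidecar.jaegertracing.io/inject annotation on the namespace makes the operator roll the vertx deployment out with, and later without, a jaeger-agent container, which is exactly what the pod events further down show. Outside the test harness, the same toggle is a single command each way, as in the step commands logged here:

kubectl annotate --overwrite namespaces $NAMESPACE sidecar.jaegertracing.io/inject=true
# and to remove the sidecar again:
kubectl annotate --overwrite namespaces $NAMESPACE sidecar.jaegertracing.io/inject=false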
08:24:26 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 08:24:27 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 08:24:27 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 08:24:27 | sidecar-namespace/6-find-service | Job:kuttl-test-game-racer/01-find-service created logger.go:42: 08:24:39 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 08:24:39 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 08:24:39 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 08:24:39 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-game-racer annotated logger.go:42: 08:24:40 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 08:24:40 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-game-racer: logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:00 +0000 UTC Normal Pod agent-as-sidecar-5b4d549f66-qp4j4 Binding Scheduled Successfully assigned kuttl-test-game-racer/agent-as-sidecar-5b4d549f66-qp4j4 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:00 +0000 UTC Normal Pod agent-as-sidecar-5b4d549f66-qp4j4 AddedInterface Add eth0 [10.128.2.76/23] from ovn-kubernetes logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:00 +0000 UTC Normal Pod agent-as-sidecar-5b4d549f66-qp4j4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:00 +0000 UTC Normal Pod agent-as-sidecar-5b4d549f66-qp4j4.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:00 +0000 UTC Normal Pod agent-as-sidecar-5b4d549f66-qp4j4.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:00 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-5b4d549f66 SuccessfulCreate Created pod: agent-as-sidecar-5b4d549f66-qp4j4 replicaset-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:00 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-5b4d549f66 to 1 deployment-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:01 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s Binding Scheduled Successfully assigned kuttl-test-game-racer/vertx-create-span-sidecar-568b7c9f6f-cxq7s to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:01 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulCreate Created pod: vertx-create-span-sidecar-568b7c9f6f-cxq7s replicaset-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:01 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-568b7c9f6f to 1 deployment-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:02 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s 
AddedInterface Add eth0 [10.129.2.94/23] from ovn-kubernetes logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:02 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:02 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:02 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw Binding Scheduled Successfully assigned kuttl-test-game-racer/vertx-create-span-sidecar-67df6b5d8f-dwbnw to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw AddedInterface Add eth0 [10.131.0.68/23] from ovn-kubernetes logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-67df6b5d8f SuccessfulCreate Created pod: vertx-create-span-sidecar-67df6b5d8f-dwbnw replicaset-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:04 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-67df6b5d8f to 1 deployment-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:06 +0000 UTC Normal Pod 00-find-service-hpr7s Binding Scheduled Successfully assigned kuttl-test-game-racer/00-find-service-hpr7s to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:06 +0000 UTC Normal 
Pod 00-find-service-hpr7s AddedInterface Add eth0 [10.128.2.77/23] from ovn-kubernetes logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:06 +0000 UTC Normal Pod 00-find-service-hpr7s.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:06 +0000 UTC Normal Pod 00-find-service-hpr7s.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:06 +0000 UTC Normal Pod 00-find-service-hpr7s.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:06 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-hpr7s job-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:10 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.94:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:10 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.94:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:12 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:12 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.94:8080/": read tcp 10.129.2.2:33010->10.129.2.94:8080: read: connection reset by peer kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:12 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.94:8080/": dial tcp 10.129.2.94:8080: connect: connection refused kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:12 +0000 UTC Warning Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:12 +0000 UTC Warning Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.68:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:14 +0000 UTC Normal Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:15 +0000 UTC Warning Pod 
vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": read tcp 10.131.0.2:58244->10.131.0.68:8080: read: connection reset by peer kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:15 +0000 UTC Warning Pod vertx-create-span-sidecar-67df6b5d8f-dwbnw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": dial tcp 10.131.0.68:8080: connect: connection refused kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:17 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:22 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-cxq7s.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.94:8080/": read tcp 10.129.2.2:45670->10.129.2.94:8080: read: connection reset by peer kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:24 +0000 UTC Normal Pod agent-as-sidecar2-56dcf466f6-jdfj6 Binding Scheduled Successfully assigned kuttl-test-game-racer/agent-as-sidecar2-56dcf466f6-jdfj6 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:24 +0000 UTC Normal Pod agent-as-sidecar2-56dcf466f6-jdfj6 AddedInterface Add eth0 [10.128.2.78/23] from ovn-kubernetes logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:24 +0000 UTC Normal Pod agent-as-sidecar2-56dcf466f6-jdfj6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:24 +0000 UTC Normal Pod agent-as-sidecar2-56dcf466f6-jdfj6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:24 +0000 UTC Normal Pod agent-as-sidecar2-56dcf466f6-jdfj6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:24 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-56dcf466f6 SuccessfulCreate Created pod: agent-as-sidecar2-56dcf466f6-jdfj6 replicaset-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:24 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-56dcf466f6 to 1 deployment-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:26 +0000 UTC Normal Pod agent-as-sidecar-5b4d549f66-qp4j4.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod 01-find-service-z6z8w Binding Scheduled Successfully assigned kuttl-test-game-racer/01-find-service-z6z8w to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod 01-find-service-z6z8w AddedInterface Add eth0 [10.129.2.95/23] from ovn-kubernetes logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod 01-find-service-z6z8w.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 
2023-10-09 08:24:27 +0000 UTC Normal Pod 01-find-service-z6z8w.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod 01-find-service-z6z8w.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-z6z8w job-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-568b7c9f6f-cxq7s replicaset-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod vertx-create-span-sidecar-9b874cfd7-lsnqx Binding Scheduled Successfully assigned kuttl-test-game-racer/vertx-create-span-sidecar-9b874cfd7-lsnqx to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod vertx-create-span-sidecar-9b874cfd7-lsnqx AddedInterface Add eth0 [10.128.2.79/23] from ovn-kubernetes logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-9b874cfd7 SuccessfulCreate Created pod: vertx-create-span-sidecar-9b874cfd7-lsnqx replicaset-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-568b7c9f6f to 0 from 1 deployment-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:27 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-9b874cfd7 to 1 from 0 deployment-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:35 +0000 UTC Warning Pod 
vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.79:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:35 +0000 UTC Warning Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.79:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:37 +0000 UTC Normal Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:38 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:38 +0000 UTC Warning Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.79:8080/": read tcp 10.128.2.2:49172->10.128.2.79:8080: read: connection reset by peer kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:38 +0000 UTC Warning Pod vertx-create-span-sidecar-9b874cfd7-lsnqx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.79:8080/": dial tcp 10.128.2.79:8080: connect: connection refused kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:39 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-67df6b5d8f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-67df6b5d8f-dwbnw replicaset-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:39 +0000 UTC Normal Pod vertx-create-span-sidecar-747d5f4c58-4j9dg Binding Scheduled Successfully assigned kuttl-test-game-racer/vertx-create-span-sidecar-747d5f4c58-4j9dg to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:39 +0000 UTC Normal Pod vertx-create-span-sidecar-747d5f4c58-4j9dg AddedInterface Add eth0 [10.129.2.96/23] from ovn-kubernetes logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:39 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-747d5f4c58 SuccessfulCreate Created pod: vertx-create-span-sidecar-747d5f4c58-4j9dg replicaset-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:39 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-67df6b5d8f to 0 from 1 deployment-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:39 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-747d5f4c58 to 1 from 0 deployment-controller logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:40 +0000 UTC Normal Pod vertx-create-span-sidecar-747d5f4c58-4j9dg.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:24:40 | sidecar-namespace | 2023-10-09 08:24:40 +0000 UTC Normal Pod vertx-create-span-sidecar-747d5f4c58-4j9dg.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:24:40 | sidecar-namespace | 
2023-10-09 08:24:40 +0000 UTC Normal Pod vertx-create-span-sidecar-747d5f4c58-4j9dg.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:24:40 | sidecar-namespace | Deleting namespace: kuttl-test-game-racer === CONT kuttl/harness/sidecar-skip-webhook logger.go:42: 08:24:47 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:24:47 | sidecar-skip-webhook | Creating namespace: kuttl-test-central-werewolf logger.go:42: 08:24:47 | sidecar-skip-webhook/0-install | starting test step 0-install logger.go:42: 08:24:47 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-central-werewolf/agent-as-sidecar created logger.go:42: 08:24:52 | sidecar-skip-webhook/0-install | test step completed 0-install logger.go:42: 08:24:52 | sidecar-skip-webhook/1-install | starting test step 1-install logger.go:42: 08:24:52 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-central-werewolf/vertx-create-span-sidecar created logger.go:42: 08:24:53 | sidecar-skip-webhook/1-install | test step completed 1-install logger.go:42: 08:24:53 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label logger.go:42: 08:24:53 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-central-werewolf] logger.go:42: 08:24:53 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled logger.go:42: 08:24:53 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-central-werewolf] logger.go:42: 08:24:53 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 08:24:53 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label logger.go:42: 08:24:53 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label logger.go:42: 08:24:53 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-central-werewolf] logger.go:42: 08:24:54 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled logger.go:42: 08:24:55 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label logger.go:42: 08:24:55 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-central-werewolf: logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:51 +0000 UTC Normal Pod agent-as-sidecar-f65547895-64zmj Binding Scheduled Successfully assigned kuttl-test-central-werewolf/agent-as-sidecar-f65547895-64zmj to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:51 +0000 UTC Normal Pod agent-as-sidecar-f65547895-64zmj AddedInterface Add eth0 [10.128.2.80/23] from ovn-kubernetes logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:51 +0000 UTC Normal Pod agent-as-sidecar-f65547895-64zmj.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 
2023-10-09 08:24:51 +0000 UTC Normal Pod agent-as-sidecar-f65547895-64zmj.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:51 +0000 UTC Normal Pod agent-as-sidecar-f65547895-64zmj.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:51 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-f65547895 SuccessfulCreate Created pod: agent-as-sidecar-f65547895-64zmj replicaset-controller logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:51 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-f65547895 to 1 deployment-controller logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:52 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-8sdkw Binding Scheduled Successfully assigned kuttl-test-central-werewolf/vertx-create-span-sidecar-568b7c9f6f-8sdkw to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:52 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulCreate Created pod: vertx-create-span-sidecar-568b7c9f6f-8sdkw replicaset-controller logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:52 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-568b7c9f6f to 1 deployment-controller logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:53 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-8sdkw AddedInterface Add eth0 [10.129.2.97/23] from ovn-kubernetes logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:53 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-8sdkw.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:53 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-8sdkw.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:53 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-8sdkw.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:53 +0000 UTC Normal Pod vertx-create-span-sidecar-f4b4f78b8-7bkmx Binding Scheduled Successfully assigned kuttl-test-central-werewolf/vertx-create-span-sidecar-f4b4f78b8-7bkmx to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:53 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-f4b4f78b8 SuccessfulCreate Created pod: vertx-create-span-sidecar-f4b4f78b8-7bkmx replicaset-controller logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:53 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-f4b4f78b8 to 1 deployment-controller logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:54 +0000 UTC Normal Pod vertx-create-span-sidecar-f4b4f78b8-7bkmx AddedInterface Add eth0 [10.131.0.69/23] from ovn-kubernetes logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:54 +0000 UTC Normal Pod 
vertx-create-span-sidecar-f4b4f78b8-7bkmx.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:54 +0000 UTC Normal Pod vertx-create-span-sidecar-f4b4f78b8-7bkmx.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:54 +0000 UTC Normal Pod vertx-create-span-sidecar-f4b4f78b8-7bkmx.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:54 +0000 UTC Normal Pod vertx-create-span-sidecar-f4b4f78b8-7bkmx.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:54 +0000 UTC Normal Pod vertx-create-span-sidecar-f4b4f78b8-7bkmx.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | 2023-10-09 08:24:54 +0000 UTC Normal Pod vertx-create-span-sidecar-f4b4f78b8-7bkmx.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:24:55 | sidecar-skip-webhook | Deleting namespace: kuttl-test-central-werewolf === CONT kuttl/harness/sidecar-deployment logger.go:42: 08:25:01 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:25:01 | sidecar-deployment | Creating namespace: kuttl-test-sweet-monkey logger.go:42: 08:25:01 | sidecar-deployment/0-install | starting test step 0-install logger.go:42: 08:25:01 | sidecar-deployment/0-install | Jaeger:kuttl-test-sweet-monkey/agent-as-sidecar created logger.go:42: 08:25:07 | sidecar-deployment/0-install | test step completed 0-install logger.go:42: 08:25:07 | sidecar-deployment/1-install | starting test step 1-install logger.go:42: 08:25:07 | sidecar-deployment/1-install | Deployment:kuttl-test-sweet-monkey/vertx-create-span-sidecar created logger.go:42: 08:25:09 | sidecar-deployment/1-install | test step completed 1-install logger.go:42: 08:25:09 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection logger.go:42: 08:25:09 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-sweet-monkey] logger.go:42: 08:25:09 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 08:25:11 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection logger.go:42: 08:25:11 | sidecar-deployment/3-find-service | starting test step 3-find-service logger.go:42: 08:25:11 | sidecar-deployment/3-find-service | Job:kuttl-test-sweet-monkey/00-find-service created logger.go:42: 08:25:22 | sidecar-deployment/3-find-service | test step completed 3-find-service logger.go:42: 08:25:22 | sidecar-deployment/4-other-instance | starting test step 4-other-instance logger.go:42: 08:25:22 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-sweet-monkey/agent-as-sidecar2 created logger.go:42: 08:25:28 | sidecar-deployment/4-other-instance | test step completed 4-other-instance logger.go:42: 08:25:28 | 
sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 08:25:29 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 08:25:29 | sidecar-deployment/6-find-service | starting test step 6-find-service logger.go:42: 08:25:29 | sidecar-deployment/6-find-service | Job:kuttl-test-sweet-monkey/01-find-service created logger.go:42: 08:25:48 | sidecar-deployment/6-find-service | test step completed 6-find-service logger.go:42: 08:25:48 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection logger.go:42: 08:25:48 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-sweet-monkey] logger.go:42: 08:25:48 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 08:25:50 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection logger.go:42: 08:25:50 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-sweet-monkey: logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:04 +0000 UTC Normal Pod agent-as-sidecar-5fbf46f66b-schkz Binding Scheduled Successfully assigned kuttl-test-sweet-monkey/agent-as-sidecar-5fbf46f66b-schkz to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:04 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-5fbf46f66b SuccessfulCreate Created pod: agent-as-sidecar-5fbf46f66b-schkz replicaset-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:04 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-5fbf46f66b to 1 deployment-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:05 +0000 UTC Normal Pod agent-as-sidecar-5fbf46f66b-schkz AddedInterface Add eth0 [10.128.2.81/23] from ovn-kubernetes logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:05 +0000 UTC Normal Pod agent-as-sidecar-5fbf46f66b-schkz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:05 +0000 UTC Normal Pod agent-as-sidecar-5fbf46f66b-schkz.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:05 +0000 UTC Normal Pod agent-as-sidecar-5fbf46f66b-schkz.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:07 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k Binding Scheduled Successfully assigned kuttl-test-sweet-monkey/vertx-create-span-sidecar-568b7c9f6f-ffj5k to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:07 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k AddedInterface Add eth0 [10.129.2.98/23] from ovn-kubernetes logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:07 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 
08:25:07 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:07 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:07 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulCreate Created pod: vertx-create-span-sidecar-568b7c9f6f-ffj5k replicaset-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:07 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-568b7c9f6f to 1 deployment-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:09 +0000 UTC Normal Pod vertx-create-span-sidecar-544db6f48f-hfqjq Binding Scheduled Successfully assigned kuttl-test-sweet-monkey/vertx-create-span-sidecar-544db6f48f-hfqjq to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:09 +0000 UTC Normal Pod vertx-create-span-sidecar-544db6f48f-hfqjq AddedInterface Add eth0 [10.131.0.70/23] from ovn-kubernetes logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:09 +0000 UTC Normal Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:09 +0000 UTC Normal Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:09 +0000 UTC Normal Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:09 +0000 UTC Normal Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:09 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-544db6f48f SuccessfulCreate Created pod: vertx-create-span-sidecar-544db6f48f-hfqjq replicaset-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:09 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-544db6f48f to 1 deployment-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:10 +0000 UTC Normal Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:10 +0000 UTC Normal Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:11 +0000 UTC Normal Pod 00-find-service-8v2jt Binding Scheduled Successfully assigned kuttl-test-sweet-monkey/00-find-service-8v2jt to ip-10-0-21-71.ec2.internal default-scheduler 
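Note: the "running command" lines in these tests come from kuttl TestStep files. A plain "command:" entry runs directly (the deployment-level toggle above, shown as "[kubectl annotate ...]"), while a "script:" entry is wrapped in sh -c (the namespace-level toggle in sidecar-namespace, shown as "[sh -c kubectl annotate ...]"); kuttl exports $NAMESPACE to both. A minimal sketch of the namespace-level step, with an illustrative file name:

# Sketch of a kuttl TestStep; the annotate command is copied from the
# sidecar-namespace trace above, the file name is illustrative.
cat > 02-enable-injection.yaml <<'EOF'
apiVersion: kuttl.dev/v1beta1
kind: TestStep
commands:
  - script: kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"
EOF

The operator also accepts a specific Jaeger instance name in place of "true" when more than one instance is in scope.
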
logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:11 +0000 UTC Normal Pod 00-find-service-8v2jt AddedInterface Add eth0 [10.128.2.82/23] from ovn-kubernetes logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:11 +0000 UTC Normal Pod 00-find-service-8v2jt.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:11 +0000 UTC Normal Pod 00-find-service-8v2jt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:11 +0000 UTC Normal Pod 00-find-service-8v2jt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:11 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-8v2jt job-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:15 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.98:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:15 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.98:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:17 +0000 UTC Warning Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.70:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:17 +0000 UTC Warning Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.70:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:17 +0000 UTC Normal Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:18 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.98:8080/": read tcp 10.129.2.2:47480->10.129.2.98:8080: read: connection reset by peer kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:18 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.98:8080/": dial tcp 10.129.2.98:8080: connect: connection refused kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:19 +0000 UTC Normal Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:25:50 | 
sidecar-deployment | 2023-10-09 08:25:20 +0000 UTC Warning Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.70:8080/": read tcp 10.131.0.2:49636->10.131.0.70:8080: read: connection reset by peer kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:20 +0000 UTC Warning Pod vertx-create-span-sidecar-544db6f48f-hfqjq.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.70:8080/": dial tcp 10.131.0.70:8080: connect: connection refused kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:22 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:26 +0000 UTC Normal Pod agent-as-sidecar2-6b59d44968-28jpl Binding Scheduled Successfully assigned kuttl-test-sweet-monkey/agent-as-sidecar2-6b59d44968-28jpl to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:26 +0000 UTC Normal Pod agent-as-sidecar2-6b59d44968-28jpl AddedInterface Add eth0 [10.128.2.83/23] from ovn-kubernetes logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:26 +0000 UTC Normal Pod agent-as-sidecar2-6b59d44968-28jpl.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:26 +0000 UTC Normal Pod agent-as-sidecar2-6b59d44968-28jpl.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:26 +0000 UTC Normal Pod agent-as-sidecar2-6b59d44968-28jpl.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:26 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-6b59d44968 SuccessfulCreate Created pod: agent-as-sidecar2-6b59d44968-28jpl replicaset-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:26 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-6b59d44968 to 1 deployment-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:28 +0000 UTC Normal Pod agent-as-sidecar-5fbf46f66b-schkz.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:29 +0000 UTC Normal Pod 01-find-service-xhv86 Binding Scheduled Successfully assigned kuttl-test-sweet-monkey/01-find-service-xhv86 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:29 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-xhv86 job-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:29 +0000 UTC Warning Pod vertx-create-span-sidecar-568b7c9f6f-ffj5k.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.98:8080/": read tcp 10.129.2.2:35622->10.129.2.98:8080: read: connection reset by peer kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:30 +0000 UTC Normal Pod 01-find-service-xhv86 AddedInterface Add eth0 [10.128.2.84/23] from ovn-kubernetes logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:30 +0000 UTC Normal Pod 
01-find-service-xhv86.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4w0gwlt0/pipeline@sha256:0e2dcf14e5b6ffb6cf0f85f75542ab872f6eb3c2308d43f5a2226f430447249b" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:30 +0000 UTC Normal Pod 01-find-service-xhv86.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:30 +0000 UTC Normal Pod 01-find-service-xhv86.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-568b7c9f6f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-568b7c9f6f-ffj5k replicaset-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr Binding Scheduled Successfully assigned kuttl-test-sweet-monkey/vertx-create-span-sidecar-7fb6f78f7f-7vskr to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr AddedInterface Add eth0 [10.129.2.99/23] from ovn-kubernetes logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7fb6f78f7f SuccessfulCreate Created pod: vertx-create-span-sidecar-7fb6f78f7f-7vskr replicaset-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-568b7c9f6f to 0 from 1 deployment-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:32 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7fb6f78f7f to 1 from 0 deployment-controller 
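Note: each injection toggle rewrites the Deployment's pod template, so the deployment controller swaps ReplicaSets wholesale (568b7c9f6f is scaled down as 7fb6f78f7f comes up above) rather than restarting pods in place. A quick way to confirm the agent landed after such a rollout, as a sketch:

# Sketch: wait for the rollout, then list the pod-template containers.
kubectl rollout status deployment/vertx-create-span-sidecar -n "$NAMESPACE" --timeout=120s
kubectl get deployment vertx-create-span-sidecar -n "$NAMESPACE" \
  -o jsonpath='{.spec.template.spec.containers[*].name}'
# expected after injection: vertx-create-span-sidecar jaeger-agent
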
logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:40 +0000 UTC Warning Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.99:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:40 +0000 UTC Warning Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.99:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:42 +0000 UTC Normal Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:43 +0000 UTC Warning Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.99:8080/": read tcp 10.129.2.2:49842->10.129.2.99:8080: read: connection reset by peer kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:43 +0000 UTC Warning Pod vertx-create-span-sidecar-7fb6f78f7f-7vskr.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.99:8080/": dial tcp 10.129.2.99:8080: connect: connection refused kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:48 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:48 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-544db6f48f SuccessfulDelete Deleted pod: vertx-create-span-sidecar-544db6f48f-hfqjq replicaset-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:48 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc9b86494-wzlcw Binding Scheduled Successfully assigned kuttl-test-sweet-monkey/vertx-create-span-sidecar-6bc9b86494-wzlcw to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:48 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6bc9b86494 SuccessfulCreate Created pod: vertx-create-span-sidecar-6bc9b86494-wzlcw replicaset-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:48 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-544db6f48f to 0 from 1 deployment-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:48 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6bc9b86494 to 1 from 0 deployment-controller logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc9b86494-wzlcw AddedInterface Add eth0 [10.128.2.85/23] from ovn-kubernetes logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc9b86494-wzlcw.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc9b86494-wzlcw.spec.containers{vertx-create-span-sidecar} 
Created Created container vertx-create-span-sidecar kubelet logger.go:42: 08:25:50 | sidecar-deployment | 2023-10-09 08:25:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc9b86494-wzlcw.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 08:25:50 | sidecar-deployment | Deleting namespace: kuttl-test-sweet-monkey === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (126.70s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.96s) --- PASS: kuttl/harness/sidecar-namespace (50.76s) --- PASS: kuttl/harness/sidecar-skip-webhook (13.52s) --- PASS: kuttl/harness/sidecar-deployment (56.43s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml time="2023-10-09T08:25:57Z" level=debug msg="Setting a new name for the test suites" time="2023-10-09T08:25:57Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-10-09T08:25:57Z" level=debug msg="normalizing test case names" time="2023-10-09T08:25:57Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts" time="2023-10-09T08:25:57Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace" time="2023-10-09T08:25:57Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook" time="2023-10-09T08:25:57Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment" +------------------------------+--------+ | NAME | RESULT | +------------------------------+--------+ | sidecar_artifacts | passed | | sidecar_sidecar_namespace | passed | | sidecar_sidecar_skip_webhook | passed | | sidecar_sidecar_deployment | passed | +------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 2 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/sidecar.xml + '[' 0 -gt 0 ']' + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true + '[' 3 -ne 3 ']' + test_suite_name=streaming + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. 
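Note: the exit logic traced just above is a tolerance gate rather than a strict pass/fail: after junitcli normalizes each suite's report, every XML under $ARTIFACT_DIR is grepped for 'failure message', and each file with at least one hit bumps a counter. The job only fails once more than three suites contain failures; here only examples.xml had hits, so the counter stops at 1 and the block exits 0. A standalone sketch:

# Sketch of the tolerance gate: count suites whose JUnit XML reports
# failures and fail the job only past the threshold.
count=0
for file in "$ARTIFACT_DIR"/*; do
  if [ "$(grep -c 'failure message' "$file")" -gt 0 ]; then
    count=$((count + 1))
  fi
done
if [ "$count" -gt 3 ]; then
  exit 1
fi
exit 0
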
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/streaming.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-streaming make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ SKIP_ES_EXTERNAL=true \ ./tests/e2e/streaming/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 59m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 59m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/streaming/render.sh ++ export SUITE_DIR=./tests/e2e/streaming ++ SUITE_DIR=./tests/e2e/streaming ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
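Note: the version_le expansion in the trace above is a compact sort -V trick: print both versions, sort them with version-number ordering, and the first argument is less than or equal to the second exactly when it comes out first. Reconstructed from the trace:

# version_le A B: succeeds when A <= B under version-number ordering.
version_le() {
  test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
}

# As traced: 0.25.0 sorts first, so version_le 0.32.0 0.25.0 fails and
# the suite sets KAFKA_USE_CUSTOM_PODSET=true for Strimzi 0.32.0.
version_le 0.32.0 0.25.0 || KAFKA_USE_CUSTOM_PODSET=true
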
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml + render_smoke_test simple-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=simple-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + export JAEGER_NAME=simple-streaming + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-tls + '[' 1 -ne 1 ']' + test_name=streaming-with-tls + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-tls' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m' Rendering files for test streaming-with-tls + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple + '[' streaming-simple '!=' _build ']' + cd .. 
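render_smoke_test, traced above for simple-streaming (and again below for tls-streaming), parameterizes one shared gomplate template purely through exported environment variables, renders it, and unsets the variables so one test's endpoints cannot leak into the next render. A sketch of the pattern, assuming a TEMPLATES_DIR shorthand for the /tmp/jaeger-tests/tests/templates prefix; the insecure branch is an assumption, since this run only exercises the secured OpenShift path:

    render_smoke_test() {
        jaeger=$1 is_secured=$2 test_step=$3
        if [ "$is_secured" = true ]; then
            protocol=https:// query_port=:443
            template=$TEMPLATES_DIR/openshift/smoke-test.yaml.template
        else
            protocol=http:// query_port=:16686   # assumed insecure branch
            template=$TEMPLATES_DIR/smoke-test.yaml.template
        fi
        # the templates read these three variables via gomplate
        export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
        export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
        export JAEGER_NAME=$jaeger
        gomplate -f "$template" -o ./$test_step-smoke-test.yaml
        gomplate -f "$TEMPLATES_DIR/smoke-test-assert.yaml.template" -o ./$test_step-assert.yaml
        unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }

The same environment-driven style is behind the elasticsearch_0.yml and elasticsearch_1.yml files kuttl later reports ignoring: render_install_elasticsearch splits the multi-document tests/elasticsearch.yml into one file per document with yq's -s '"elasticsearch_" + $index' before patching and rendering them.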
+ mkdir -p streaming-with-tls + cd streaming-with-tls + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + render_smoke_test tls-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=tls-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + export JAEGER_NAME=tls-streaming + JAEGER_NAME=tls-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. 
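In the streaming-with-autoprovisioning-autoscale render that follows, note the first two yq patches against ./02-install.yaml: both target .spec.ingester.resources.requests.memory, so the second value ("500m") silently overwrites the first ("20Mi"). "500m" is a CPU-style quantity (as a memory request it means half a byte), and the Deployment diff further below confirms the pod ends up with requests.memory: 500m and no cpu request at all; the HPA then warns "missing request for cpu in container jaeger-ingester", and the step-7 assertion that .status.readyReplicas reaches 2 times out at 1. The second patch was most likely meant to set the cpu request instead, roughly:

    yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml
    yq e -i '.spec.ingester.resources.requests.cpu="500m"' ./02-install.yaml   # presumably .cpu, not .memory again
    yq e -i '.spec.ingester.autoscale=true' ./02-install.yaml
    yq e -i '.spec.ingester.minReplicas=1' ./02-install.yaml
    yq e -i '.spec.ingester.maxReplicas=2' ./02-install.yaml

With a cpu request in place the HPA could compute cpu utilization against the tracegen load and scale the ingester from minReplicas=1 toward the maxReplicas=2 that the final assert step waits for.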
+ mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./4-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + render_install_tracegen auto-provisioned 06 + '[' 2 -ne 2 ']' + jaeger=auto-provisioned + step=06 + replicas=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/tracegen.yaml -o ./06-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=1 ./06-install.yaml + sed -i s~simple-prod~auto-provisioned~gi ./06-install.yaml + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-tracegen.yaml.template -o ./06-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd 
tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1500832312 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-simple === PAUSE kuttl/harness/streaming-simple === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === RUN kuttl/harness/streaming-with-tls === PAUSE kuttl/harness/streaming-with-tls === CONT kuttl/harness/artifacts logger.go:42: 08:26:10 | artifacts | Creating namespace: kuttl-test-resolved-opossum logger.go:42: 08:26:10 | artifacts | artifacts events from ns kuttl-test-resolved-opossum: logger.go:42: 08:26:10 | artifacts | Deleting namespace: kuttl-test-resolved-opossum === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-cute-mantis logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 08:26:16 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 08:26:22 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 08:26:22 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 08:26:22 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 08:26:25 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 08:26:26 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 08:26:43 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 
1-install logger.go:42: 08:26:43 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install logger.go:42: 08:26:43 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-cute-mantis/auto-provisioned created logger.go:42: 08:26:43 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install logger.go:42: 08:26:43 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3- logger.go:42: 08:27:18 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3- logger.go:42: 08:27:18 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4- logger.go:42: 08:27:48 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4- logger.go:42: 08:27:48 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5- logger.go:42: 08:28:10 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5- logger.go:42: 08:28:10 | streaming-with-autoprovisioning-autoscale/6-install | starting test step 6-install logger.go:42: 08:28:10 | streaming-with-autoprovisioning-autoscale/6-install | Deployment:kuttl-test-cute-mantis/tracegen created logger.go:42: 08:28:16 | streaming-with-autoprovisioning-autoscale/6-install | test step completed 6-install logger.go:42: 08:28:16 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7- logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale/7- | test step failed 7- case.go:364: failed in step 7- case.go:366: --- Deployment:kuttl-test-cute-mantis/auto-provisioned-ingester +++ Deployment:kuttl-test-cute-mantis/auto-provisioned-ingester @@ -1,8 +1,320 @@ apiVersion: apps/v1 kind: Deployment metadata: + labels: + app: jaeger + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: auto-provisioned + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: auto-provisioned-ingester + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"3cb43e80-036f-47da-b35b-18f7927937c9"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-ingester"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":14270,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + 
f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: + .: {} + f:requests: + .: {} + f:memory: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/var/run/secrets/auto-provisioned"}: + .: {} + f:mountPath: {} + f:name: {} + k:{"mountPath":"/var/run/secrets/auto-provisioned-cluster-ca"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"auto-provisioned-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"kafkauser-auto-provisioned"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"kafkauser-auto-provisioned-cluster-ca"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: Go-http-client + operation: Update + time: "2023-10-09T08:28:12Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:deployment.kubernetes.io/revision: {} + f:status: + f:availableReplicas: {} + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:readyReplicas: {} + f:replicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2023-10-09T08:28:14Z" name: auto-provisioned-ingester namespace: kuttl-test-cute-mantis + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: auto-provisioned + uid: 3cb43e80-036f-47da-b35b-18f7927937c9 +spec: + progressDeadlineSeconds: 600 + replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: auto-provisioned + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: auto-provisioned-ingester + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14270" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: auto-provisioned + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: auto-provisioned-ingester + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --es.server-urls=http://elasticsearch:9200 + - --kafka.consumer.authentication=tls + - --kafka.consumer.brokers=auto-provisioned-kafka-bootstrap.kuttl-test-cute-mantis.svc.cluster.local:9093 + - --kafka.consumer.tls.ca=/var/run/secrets/auto-provisioned-cluster-ca/ca.crt + - --kafka.consumer.tls.cert=/var/run/secrets/auto-provisioned/user.crt + - --kafka.consumer.tls.enabled=true + - --kafka.consumer.tls.key=/var/run/secrets/auto-provisioned/user.key + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + image: 
registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14270 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-ingester + ports: + - containerPort: 14270 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14270 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: + requests: + memory: 500m + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /var/run/secrets/auto-provisioned + name: kafkauser-auto-provisioned + - mountPath: /var/run/secrets/auto-provisioned-cluster-ca + name: kafkauser-auto-provisioned-cluster-ca + - mountPath: /etc/pki/ca-trust/extracted/pem + name: auto-provisioned-trusted-ca + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: auto-provisioned + serviceAccountName: auto-provisioned + terminationGracePeriodSeconds: 30 + volumes: + - name: kafkauser-auto-provisioned + secret: + defaultMode: 420 + secretName: auto-provisioned + - name: kafkauser-auto-provisioned-cluster-ca + secret: + defaultMode: 420 + secretName: auto-provisioned-cluster-ca-cert + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: auto-provisioned-trusted-ca + name: auto-provisioned-trusted-ca status: - readyReplicas: 2 + availableReplicas: 1 + conditions: + - lastTransitionTime: "2023-10-09T08:28:14Z" + lastUpdateTime: "2023-10-09T08:28:14Z" + message: Deployment has minimum availability. + reason: MinimumReplicasAvailable + status: "True" + type: Available + - lastTransitionTime: "2023-10-09T08:28:12Z" + lastUpdateTime: "2023-10-09T08:28:14Z" + message: ReplicaSet "auto-provisioned-ingester-7ccc6c9c9b" has successfully progressed. 
+ reason: NewReplicaSetAvailable + status: "True" + type: Progressing + observedGeneration: 1 + readyReplicas: 1 + replicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-cute-mantis/auto-provisioned-ingester: .status.readyReplicas: value mismatch, expected: 2 != actual: 1 logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-cute-mantis: logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:22 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-cute-mantis/elasticsearch-0 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:22 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:23 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.129.2.100/23] from ovn-kubernetes logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:23 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:31 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 8.241607078s (8.241620468s including waiting) kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:31 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:31 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:38 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.129.2.100:9200/": dial tcp 10.129.2.100:9200: connect: connection refused kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:47 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:47 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:47 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-cute-mantis/data-auto-provisioned-zookeeper-0" logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:47 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 
2023-10-09 08:26:50 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-df9d9884-d069-46b6-9268-b6d7b5848ac0 logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:51 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-cute-mantis/auto-provisioned-zookeeper-0 to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:53 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-df9d9884-d069-46b6-9268-b6d7b5848ac0" attachdetach-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:57 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.131.0.71/23] from ovn-kubernetes logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:57 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:57 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:26:57 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:18 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:19 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:19 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-cute-mantis/data-0-auto-provisioned-kafka-0" logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:19 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "ebs.csi.aws.com" or manually created by system administrator persistentvolume-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:22 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-05778293-abfe-4da2-8a4d-f449b554a72b logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:23 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-cute-mantis/auto-provisioned-kafka-0 to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:25 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume 
"pvc-05778293-abfe-4da2-8a4d-f449b554a72b" attachdetach-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:28 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.131.0.72/23] from ovn-kubernetes logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:28 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:28 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:28 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm Binding Scheduled Successfully assigned kuttl-test-cute-mantis/auto-provisioned-entity-operator-6585449cd5-rgwfm to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm AddedInterface Add eth0 [10.131.0.73/23] from ovn-kubernetes logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:abdf1ca15c55860e7962057fcb84d2eef800996fcbc3f9f80eeb7efa79dbafcc" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm.spec.containers{tls-sidecar} Pulled Container image 
"registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:88369bcc4453a18e8814bed84f1701b3e47e702c58f94738879ec9ad4a0d0f16" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-6585449cd5-rgwfm.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-6585449cd5 SuccessfulCreate Created pod: auto-provisioned-entity-operator-6585449cd5-rgwfm replicaset-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:27:49 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-6585449cd5 to 1 deployment-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:10 +0000 UTC Normal Deployment.apps tracegen ScalingReplicaSet Scaled up replica set tracegen-b69b4b454 to 1 deployment-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:11 +0000 UTC Normal Pod tracegen-b69b4b454-88zkv Binding Scheduled Successfully assigned kuttl-test-cute-mantis/tracegen-b69b4b454-88zkv to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:11 +0000 UTC Warning Pod tracegen-b69b4b454-88zkv FailedMount MountVolume.SetUp failed for volume "auto-provisioned-service-ca" : configmap "auto-provisioned-service-ca" not found kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:11 +0000 UTC Warning Pod tracegen-b69b4b454-88zkv FailedMount MountVolume.SetUp failed for volume "auto-provisioned-trusted-ca" : configmap "auto-provisioned-trusted-ca" not found kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:11 +0000 UTC Normal ReplicaSet.apps tracegen-b69b4b454 SuccessfulCreate Created pod: tracegen-b69b4b454-88zkv replicaset-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal Pod auto-provisioned-collector-96c9559c8-5j8nk Binding Scheduled Successfully assigned kuttl-test-cute-mantis/auto-provisioned-collector-96c9559c8-5j8nk to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-96c9559c8 SuccessfulCreate Created pod: auto-provisioned-collector-96c9559c8-5j8nk replicaset-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-96c9559c8 to 1 deployment-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal Pod auto-provisioned-ingester-7ccc6c9c9b-hdp85 Binding Scheduled Successfully assigned kuttl-test-cute-mantis/auto-provisioned-ingester-7ccc6c9c9b-hdp85 to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 
08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-7ccc6c9c9b SuccessfulCreate Created pod: auto-provisioned-ingester-7ccc6c9c9b-hdp85 replicaset-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-7ccc6c9c9b to 1 deployment-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8 Binding Scheduled Successfully assigned kuttl-test-cute-mantis/auto-provisioned-query-87c946874-lslp8 to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-87c946874 SuccessfulCreate Created pod: auto-provisioned-query-87c946874-lslp8 replicaset-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-87c946874 to 1 deployment-controller logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal Pod tracegen-b69b4b454-88zkv AddedInterface Add eth0 [10.128.2.86/23] from ovn-kubernetes logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:12 +0000 UTC Normal Pod tracegen-b69b4b454-88zkv.spec.containers{tracegen} Pulling Pulling image "jaegertracing/jaeger-tracegen:1.49.0" kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-collector-96c9559c8-5j8nk AddedInterface Add eth0 [10.129.2.101/23] from ovn-kubernetes logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-collector-96c9559c8-5j8nk.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-collector-96c9559c8-5j8nk.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-collector-96c9559c8-5j8nk.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-ingester-7ccc6c9c9b-hdp85 AddedInterface Add eth0 [10.129.2.102/23] from ovn-kubernetes logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-ingester-7ccc6c9c9b-hdp85.spec.containers{jaeger-ingester} Pulled Container image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:6f019a3bc4c491d31a1af50e2929aa6e01b6c2c1fc12acbd0ef12204f4e56d07" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-ingester-7ccc6c9c9b-hdp85.spec.containers{jaeger-ingester} Created Created 
container jaeger-ingester kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-ingester-7ccc6c9c9b-hdp85.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8 AddedInterface Add eth0 [10.128.2.87/23] from ovn-kubernetes logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:13 +0000 UTC Normal Pod auto-provisioned-query-87c946874-lslp8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:14 +0000 UTC Normal Pod tracegen-b69b4b454-88zkv.spec.containers{tracegen} Pulled Successfully pulled image "jaegertracing/jaeger-tracegen:1.49.0" in 1.498460005s (1.498473595s including waiting) kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:14 +0000 UTC Normal Pod tracegen-b69b4b454-88zkv.spec.containers{tracegen} Created Created container tracegen kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:14 +0000 UTC Normal Pod 
tracegen-b69b4b454-88zkv.spec.containers{tracegen} Started Started container tracegen kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:14 +0000 UTC Normal Pod tracegen-b69b4b454-88zkv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:14 +0000 UTC Normal Pod tracegen-b69b4b454-88zkv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:14 +0000 UTC Normal Pod tracegen-b69b4b454-88zkv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:28:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:29:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:29:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling 
auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod auto-provisioned-collector-96c9559c8-5j8nk horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:29:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:29:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:29:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (1 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:33:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod auto-provisioned-collector-96c9559c8-5j8nk horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | 2023-10-09 08:33:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-ingester of Pod auto-provisioned-ingester-7ccc6c9c9b-hdp85 horizontal-pod-autoscaler logger.go:42: 08:35:16 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-cute-mantis === CONT kuttl/harness/streaming-with-tls logger.go:42: 08:35:34 | streaming-with-tls | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:35:34 | streaming-with-tls | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:35:34 | streaming-with-tls | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:35:34 | streaming-with-tls | Creating namespace: kuttl-test-wondrous-gnat logger.go:42: 08:35:34 | streaming-with-tls/0-install | starting test step 0-install logger.go:42: 08:35:34 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 08:35:34 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 08:35:34 | streaming-with-tls/0-install | >>>> Skipping kafka-operator undeploy logger.go:42: 08:35:34 | streaming-with-tls/0-install | kubectl delete --namespace kuttl-test-wondrous-gnat -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 08:35:34 | streaming-with-tls/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist logger.go:42: 08:35:34 | streaming-with-tls/0-install | make[2]: Leaving directory
'/tmp/jaeger-tests' logger.go:42: 08:35:34 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 08:35:34 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 08:35:34 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-wondrous-gnat logger.go:42: 08:35:34 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-wondrous-gnat 2>&1 | grep -v "already exists" || true logger.go:42: 08:35:34 | streaming-with-tls/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 08:35:34 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-wondrous-gnat logger.go:42: 08:35:34 | streaming-with-tls/0-install | mkdir -p tests/_build/ logger.go:42: 08:35:34 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-wondrous-gnat 2>&1 | grep -v "already exists" || true logger.go:42: 08:35:34 | streaming-with-tls/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 08:35:34 | streaming-with-tls/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 08:35:34 | streaming-with-tls/0-install | Dload Upload Total Spent Left Speed logger.go:42: 08:35:34 | streaming-with-tls/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 27169 0 --:--:-- --:--:-- --:--:-- 27903 logger.go:42: 08:35:34 | streaming-with-tls/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 08:35:34 | streaming-with-tls/0-install | kubectl -n kuttl-test-wondrous-gnat apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 08:35:35 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 08:35:35 | streaming-with-tls/0-install | kubectl -n kuttl-test-wondrous-gnat apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 08:35:35 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 08:35:35 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 08:42:36 | streaming-with-tls/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 08:42:36 | streaming-with-tls | streaming-with-tls events from ns kuttl-test-wondrous-gnat: logger.go:42: 08:42:36 | streaming-with-tls | Deleting namespace: kuttl-test-wondrous-gnat === CONT kuttl/harness/streaming-simple logger.go:42: 08:42:42 | streaming-simple | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:42:42 | streaming-simple | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:42:42 | streaming-simple | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:42:42 | streaming-simple | Creating namespace: kuttl-test-bright-kit logger.go:42: 08:42:42 | streaming-simple/0-install | starting test step 0-install logger.go:42: 08:42:42 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka 
KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 08:42:42 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 08:42:42 | streaming-simple/0-install | >>>> Skipping kafka-operator undeploy logger.go:42: 08:42:42 | streaming-simple/0-install | kubectl delete --namespace kuttl-test-bright-kit -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 08:42:42 | streaming-simple/0-install | Error from server (NotFound): error when deleting "tests/_build/kafka-example.yaml": kafkas.kafka.strimzi.io "my-cluster" not found logger.go:42: 08:42:42 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 08:42:42 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 08:42:42 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 08:42:42 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-bright-kit logger.go:42: 08:42:42 | streaming-simple/0-install | kubectl create namespace kuttl-test-bright-kit 2>&1 | grep -v "already exists" || true logger.go:42: 08:42:42 | streaming-simple/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 08:42:42 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-bright-kit logger.go:42: 08:42:42 | streaming-simple/0-install | mkdir -p tests/_build/ logger.go:42: 08:42:42 | streaming-simple/0-install | kubectl create namespace kuttl-test-bright-kit 2>&1 | grep -v "already exists" || true logger.go:42: 08:42:42 | streaming-simple/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 08:42:42 | streaming-simple/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 08:42:42 | streaming-simple/0-install | Dload Upload Total Spent Left Speed logger.go:42: 08:42:42 | streaming-simple/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 24592 0 --:--:-- --:--:-- --:--:-- 24714 logger.go:42: 08:42:42 | streaming-simple/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 08:42:42 | streaming-simple/0-install | kubectl -n kuttl-test-bright-kit apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 08:42:43 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 08:42:43 | streaming-simple/0-install | kubectl -n kuttl-test-bright-kit apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 08:42:44 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 08:42:44 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 08:49:44 | streaming-simple/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 08:49:44 | streaming-simple | streaming-simple events from ns kuttl-test-bright-kit: logger.go:42: 08:49:44 | streaming-simple | Deleting namespace: kuttl-test-bright-kit === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- FAIL: kuttl (1420.21s) --- FAIL:
kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.93s) --- FAIL: kuttl/harness/streaming-with-autoprovisioning-autoscale (558.22s) --- FAIL: kuttl/harness/streaming-with-tls (427.94s) --- FAIL: kuttl/harness/streaming-simple (428.08s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml time="2023-10-09T08:49:50Z" level=debug msg="Setting a new name for the test suites" time="2023-10-09T08:49:50Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-10-09T08:49:50Z" level=debug msg="normalizing test case names" time="2023-10-09T08:49:50Z" level=debug msg="streaming/artifacts -> streaming_artifacts" time="2023-10-09T08:49:50Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale" time="2023-10-09T08:49:50Z" level=debug msg="streaming/streaming-with-tls -> streaming_streaming_with_tls" time="2023-10-09T08:49:50Z" level=debug msg="streaming/streaming-simple -> streaming_streaming_simple" +-----------------------------------------------------+--------+ | NAME | RESULT | +-----------------------------------------------------+--------+ | streaming_artifacts | passed | | streaming_streaming_with_autoprovisioning_autoscale | failed | | streaming_streaming_with_tls | failed | | streaming_streaming_simple | failed | +-----------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 2 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/sidecar.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/streaming.xml + '[' 3 -gt 0 ']' + count=2 + '[' 2 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true + '[' 3 -ne 3 ']' + test_suite_name=ui + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. 
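Why the streaming run above ends with exit 0 despite three failed tests: the trace finishes with '[' 2 -gt 3 ']' followed by exit 0, i.e. the wrapper counts how many suite reports contain failures and only marks the job red when more than three do. A minimal sketch of that gate, assuming $ARTIFACT_DIR holds one JUnit XML per suite (variable names match the trace; the full wrapper script is not shown in this log):

    count=0
    for file in "$ARTIFACT_DIR"/*; do
        # Count <failure message=...> entries in each suite's JUnit report.
        failures=$(grep -c 'failure message' "$file")
        if [ "$failures" -gt 0 ]; then
            count=$((count + 1))    # one strike per suite with any failure
        fi
    done
    # Only more than three failing suites turns the whole job red.
    if [ "$count" -gt 3 ]; then
        exit 1
    fi
    exit 0

In this run, examples.xml and streaming.xml each contributed one strike, so count=2 and the job passed the gate.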
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/ui.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-ui make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 83m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 83m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/ui/render.sh ++ export SUITE_DIR=./tests/e2e/ui ++ SUITE_DIR=./tests/e2e/ui ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/ui ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test allinone + '[' 1 -ne 1 ']' + test_name=allinone + echo =========================================================================== =========================================================================== + info 'Rendering files for test allinone' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test allinone\e[0m' Rendering files for test allinone + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build + '[' _build '!=' _build ']' + mkdir -p allinone + cd allinone + export GET_URL_COMMAND + export URL + export JAEGER_NAME=all-in-one-ui + JAEGER_NAME=all-in-one-ui + '[' true = true ']' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml + start_test production + '[' 1 -ne 1 ']' + test_name=production + echo =========================================================================== =========================================================================== + info 'Rendering files for test production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test production\e[0m' Rendering files for test production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone + '[' allinone '!=' _build ']' + cd .. 
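The allinone render above works by exporting shell variables and letting gomplate substitute them into the YAML templates. A condensed sketch of the step that produces 01-curl.yaml, assuming the template reads these variables through gomplate's environment functions (the template body itself is not captured in this log):

    export JAEGER_NAME=all-in-one-ui
    # On OpenShift the UI URL is taken from the first Route's ingress host.
    export GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
    export URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
    export EXPECTED_CODE=200    # the rendered step asserts this HTTP status
    gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml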
+ mkdir -p production + cd production + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + [[ true = true ]] + [[ true = true ]] + render_install_jaeger production-ui production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + '[' true = true ']' + INSECURE=true + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml + INSECURE=true + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml + ASSERT_PRESENT=false + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running ui E2E tests' Running ui E2E tests + cd tests/e2e/ui/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1500832312 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 3 tests === RUN kuttl/harness === RUN kuttl/harness/allinone === PAUSE kuttl/harness/allinone === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/production === PAUSE kuttl/harness/production === CONT kuttl/harness/allinone logger.go:42: 08:49:57 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:49:57 | allinone | Creating namespace: kuttl-test-safe-bee logger.go:42: 08:49:57 | allinone/0-install | starting test step 0-install logger.go:42: 08:49:57 | allinone/0-install | Jaeger:kuttl-test-safe-bee/all-in-one-ui created logger.go:42: 08:50:01 | allinone/0-install | test step completed 0-install logger.go:42: 08:50:01 | allinone/1-curl | starting test step 1-curl logger.go:42: 08:50:01 | allinone/1-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 08:50:01 | allinone/1-curl | Checking the Ingress host value was populated logger.go:42: 08:50:01 | allinone/1-curl | Try number 0 logger.go:42: 08:50:01 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-safe-bee.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 08:50:01 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui] logger.go:42: 08:50:01 | allinone/1-curl | Checking an expected HTTP response logger.go:42: 08:50:01 | allinone/1-curl | Running in OpenShift logger.go:42: 08:50:01 | allinone/1-curl | User not provided. Getting the token... logger.go:42: 08:50:03 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:50:09 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-safe-bee.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:50:09 | allinone/1-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 08:50:09 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-safe-bee.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:50:09 | allinone/1-curl | HTTP response is 503. 200 expected. 
Waiting 10 s logger.go:42: 08:50:19 | allinone/1-curl | Try number 3/30 the https://all-in-one-ui-kuttl-test-safe-bee.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:50:19 | allinone/1-curl | curl response asserted properly logger.go:42: 08:50:19 | allinone/1-curl | test step completed 1-curl logger.go:42: 08:50:19 | allinone/2-delete | starting test step 2-delete logger.go:42: 08:50:19 | allinone/2-delete | Jaeger:kuttl-test-safe-bee/all-in-one-ui created logger.go:42: 08:50:19 | allinone/2-delete | test step completed 2-delete logger.go:42: 08:50:19 | allinone/3-install | starting test step 3-install logger.go:42: 08:50:19 | allinone/3-install | Jaeger:kuttl-test-safe-bee/all-in-one-ui updated logger.go:42: 08:50:19 | allinone/3-install | test step completed 3-install logger.go:42: 08:50:19 | allinone/4-test-ui-config | starting test step 4-test-ui-config logger.go:42: 08:50:19 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh] logger.go:42: 08:50:19 | allinone/4-test-ui-config | Checking the Ingress host value was populated logger.go:42: 08:50:19 | allinone/4-test-ui-config | Try number 0 logger.go:42: 08:50:19 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 08:50:19 | allinone/4-test-ui-config | template was: logger.go:42: 08:50:19 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host} logger.go:42: 08:50:19 | allinone/4-test-ui-config | object given to jsonpath engine was: logger.go:42: 08:50:19 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 08:50:19 | allinone/4-test-ui-config | logger.go:42: 08:50:19 | allinone/4-test-ui-config | logger.go:42: 08:50:29 | allinone/4-test-ui-config | Try number 1 logger.go:42: 08:50:29 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-safe-bee.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 08:50:29 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 08:50:30 | allinone/4-test-ui-config | time="2023-10-09T08:50:30Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-safe-bee.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search..." logger.go:42: 08:50:30 | allinone/4-test-ui-config | time="2023-10-09T08:50:30Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 08:50:30 | allinone/4-test-ui-config | time="2023-10-09T08:50:30Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-safe-bee.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search" logger.go:42: 08:50:30 | allinone/4-test-ui-config | time="2023-10-09T08:50:30Z" level=info msg="Doing request number 0" logger.go:42: 08:50:30 | allinone/4-test-ui-config | time="2023-10-09T08:50:30Z" level=info msg="Content found and asserted!" logger.go:42: 08:50:30 | allinone/4-test-ui-config | time="2023-10-09T08:50:30Z" level=info msg="Success!" 
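The jsonpath error at "Try number 0" earlier in this step is a benign race: right after the Jaeger CR is re-applied, the Route list in the namespace is momentarily empty, so '{.items[0].status.ingress[0].host}' indexes out of bounds; the next try succeeds. A minimal retry loop in the spirit of ensure-ingress-host.sh (the script is rendered from a template not shown here, so this is an assumption based on the observed output):

    try=0
    while [ "$try" -lt 30 ]; do
        echo "Try number $try"
        HOSTNAME=$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE" 2>/dev/null)
        if [ -n "$HOSTNAME" ]; then
            echo "Hostname is $HOSTNAME"
            break
        fi
        sleep 10    # matches the ~10 s gap between tries in the log
        try=$((try + 1))
    done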
logger.go:42: 08:50:30 | allinone/4-test-ui-config | test step completed 4-test-ui-config logger.go:42: 08:50:30 | allinone | allinone events from ns kuttl-test-safe-bee: logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:00 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-6c564c8d76 SuccessfulCreate Created pod: all-in-one-ui-6c564c8d76-vwj7r replicaset-controller logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:00 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-6c564c8d76 to 1 deployment-controller logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:01 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r Binding Scheduled Successfully assigned kuttl-test-safe-bee/all-in-one-ui-6c564c8d76-vwj7r to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:01 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r AddedInterface Add eth0 [10.128.2.90/23] from ovn-kubernetes logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:01 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:01 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:01 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:01 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:01 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:01 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:05 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-6c564c8d76 SuccessfulDelete Deleted pod: all-in-one-ui-6c564c8d76-vwj7r replicaset-controller logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:05 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-6c564c8d76 to 0 from 1 deployment-controller logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:06 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:06 +0000 UTC Normal Pod all-in-one-ui-6c564c8d76-vwj7r.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 +0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n Binding Scheduled Successfully assigned kuttl-test-safe-bee/all-in-one-ui-686689dbbf-24q5n to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 +0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n AddedInterface Add eth0 [10.128.2.91/23] from ovn-kubernetes logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 
+0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 +0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 +0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 +0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 +0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 +0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-686689dbbf SuccessfulCreate Created pod: all-in-one-ui-686689dbbf-24q5n replicaset-controller logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:07 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-686689dbbf to 1 deployment-controller logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:19 +0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:19 +0000 UTC Normal Pod all-in-one-ui-686689dbbf-24q5n.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:23 +0000 UTC Normal Pod all-in-one-ui-7b99df77cf-4lmwc Binding Scheduled Successfully assigned kuttl-test-safe-bee/all-in-one-ui-7b99df77cf-4lmwc to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:23 +0000 UTC Normal Pod all-in-one-ui-7b99df77cf-4lmwc AddedInterface Add eth0 [10.128.2.92/23] from ovn-kubernetes logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:23 +0000 UTC Normal Pod all-in-one-ui-7b99df77cf-4lmwc.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:f60839e11d014ff68b73997be24de6da1040816a2a88a4633b6f57fcb964c7b9" already present on machine kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:23 +0000 UTC Normal Pod all-in-one-ui-7b99df77cf-4lmwc.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:23 +0000 UTC Normal Pod all-in-one-ui-7b99df77cf-4lmwc.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:23 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-7b99df77cf SuccessfulCreate Created pod: all-in-one-ui-7b99df77cf-4lmwc replicaset-controller logger.go:42: 08:50:30 | allinone | 2023-10-09 08:50:23 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-7b99df77cf to 1 deployment-controller logger.go:42: 08:50:30 | allinone | Deleting 
namespace: kuttl-test-safe-bee === CONT kuttl/harness/production logger.go:42: 08:50:36 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:50:36 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:50:36 | production | Creating namespace: kuttl-test-capital-skunk logger.go:42: 08:50:36 | production/1-install | starting test step 1-install logger.go:42: 08:50:36 | production/1-install | Jaeger:kuttl-test-capital-skunk/production-ui created logger.go:42: 08:51:12 | production/1-install | test step completed 1-install logger.go:42: 08:51:12 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access logger.go:42: 08:51:12 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh] logger.go:42: 08:51:12 | production/2-check-forbbiden-access | Checking the Ingress host value was populated logger.go:42: 08:51:12 | production/2-check-forbbiden-access | Try number 0 logger.go:42: 08:51:12 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 08:51:12 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui] logger.go:42: 08:51:12 | production/2-check-forbbiden-access | Checking an expected HTTP response logger.go:42: 08:51:12 | production/2-check-forbbiden-access | Running in OpenShift logger.go:42: 08:51:12 | production/2-check-forbbiden-access | Not using any secret logger.go:42: 08:51:12 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:51:12 | production/2-check-forbbiden-access | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 08:51:12 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:51:12 | production/2-check-forbbiden-access | HTTP response is 503. 403 expected. 
Waiting 10 s logger.go:42: 08:51:22 | production/2-check-forbbiden-access | Try number 3/30 the https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:51:22 | production/2-check-forbbiden-access | curl response asserted properly logger.go:42: 08:51:22 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access logger.go:42: 08:51:22 | production/3-curl | starting test step 3-curl logger.go:42: 08:51:22 | production/3-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 08:51:22 | production/3-curl | Checking the Ingress host value was populated logger.go:42: 08:51:22 | production/3-curl | Try number 0 logger.go:42: 08:51:22 | production/3-curl | Hostname is production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 08:51:22 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 08:51:22 | production/3-curl | Checking an expected HTTP response logger.go:42: 08:51:22 | production/3-curl | Running in OpenShift logger.go:42: 08:51:22 | production/3-curl | User not provided. Getting the token... logger.go:42: 08:51:23 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:51:30 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:51:30 | production/3-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 08:51:30 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:51:30 | production/3-curl | HTTP response is 503. 200 expected. 
Waiting 10 s logger.go:42: 08:51:40 | production/3-curl | Try number 3/30 the https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:51:40 | production/3-curl | curl response asserted properly logger.go:42: 08:51:40 | production/3-curl | test step completed 3-curl logger.go:42: 08:51:40 | production/4-install | starting test step 4-install logger.go:42: 08:51:40 | production/4-install | Jaeger:kuttl-test-capital-skunk/production-ui updated logger.go:42: 08:51:40 | production/4-install | test step completed 4-install logger.go:42: 08:51:40 | production/5-check-disabled-security | starting test step 5-check-disabled-security logger.go:42: 08:51:40 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh] logger.go:42: 08:51:40 | production/5-check-disabled-security | Checking the Ingress host value was populated logger.go:42: 08:51:40 | production/5-check-disabled-security | Try number 0 logger.go:42: 08:51:40 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 08:51:40 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 08:51:40 | production/5-check-disabled-security | Checking an expected HTTP response logger.go:42: 08:51:40 | production/5-check-disabled-security | Running in OpenShift logger.go:42: 08:51:40 | production/5-check-disabled-security | Not using any secret logger.go:42: 08:51:40 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:51:40 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 08:51:40 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:51:40 | production/5-check-disabled-security | HTTP response is 403. 200 expected. 
Waiting 10 s logger.go:42: 08:51:50 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search logger.go:42: 08:51:50 | production/5-check-disabled-security | curl response asserted properly logger.go:42: 08:51:50 | production/5-check-disabled-security | test step completed 5-check-disabled-security logger.go:42: 08:51:50 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID logger.go:42: 08:51:50 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 08:51:50 | production/6-check-NO-gaID | Checking the Ingress host value was populated logger.go:42: 08:51:50 | production/6-check-NO-gaID | Try number 0 logger.go:42: 08:51:50 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 08:51:50 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 08:51:50 | production/6-check-NO-gaID | time="2023-10-09T08:51:50Z" level=info msg="Querying https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search..." logger.go:42: 08:51:50 | production/6-check-NO-gaID | time="2023-10-09T08:51:50Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 08:51:50 | production/6-check-NO-gaID | time="2023-10-09T08:51:50Z" level=info msg="Polling to https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search" logger.go:42: 08:51:50 | production/6-check-NO-gaID | time="2023-10-09T08:51:50Z" level=info msg="Doing request number 0" logger.go:42: 08:51:50 | production/6-check-NO-gaID | time="2023-10-09T08:51:50Z" level=info msg="Content not found and asserted it was not found!" logger.go:42: 08:51:50 | production/6-check-NO-gaID | time="2023-10-09T08:51:50Z" level=info msg="Success!" 
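Step 6-check-NO-gaID inverts the usual assertion: with ASSERT_PRESENT=false, the checker polls the UI and succeeds only if EXPECTED_CONTENT never appears in the page. The real checker is the Go program cmd-utils/uiconfig/main.go; this shell rendition of the single-request case is an illustrative assumption, not the actual implementation:

    QUERY_HOSTNAME="https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE")/search"
    page=$(curl -ksS "$QUERY_HOSTNAME")
    if printf '%s' "$page" | grep -q "MyTrackingId"; then
        echo "Content found but ASSERT_PRESENT=false"    # would fail the step
        exit 1
    fi
    echo "Content not found and asserted it was not found!"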
logger.go:42: 08:51:50 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID logger.go:42: 08:51:50 | production/7-add-tracking-id | starting test step 7-add-tracking-id logger.go:42: 08:51:50 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE] logger.go:42: 08:51:51 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured logger.go:42: 08:51:51 | production/7-add-tracking-id | test step completed 7-add-tracking-id logger.go:42: 08:51:51 | production/8-check-gaID | starting test step 8-check-gaID logger.go:42: 08:51:51 | production/8-check-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 08:51:51 | production/8-check-gaID | Checking the Ingress host value was populated logger.go:42: 08:51:51 | production/8-check-gaID | Try number 0 logger.go:42: 08:51:51 | production/8-check-gaID | Hostname is production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com logger.go:42: 08:51:51 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 08:51:51 | production/8-check-gaID | time="2023-10-09T08:51:51Z" level=info msg="Querying https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search..." logger.go:42: 08:51:51 | production/8-check-gaID | time="2023-10-09T08:51:51Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 08:51:51 | production/8-check-gaID | time="2023-10-09T08:51:51Z" level=info msg="Polling to https://production-ui-kuttl-test-capital-skunk.apps.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com/search" logger.go:42: 08:51:51 | production/8-check-gaID | time="2023-10-09T08:51:51Z" level=info msg="Doing request number 0" logger.go:42: 08:51:51 | production/8-check-gaID | time="2023-10-09T08:51:51Z" level=warning msg="Found: false . Assert: true" logger.go:42: 08:51:51 | production/8-check-gaID | time="2023-10-09T08:51:51Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 08:51:51 | production/8-check-gaID | time="2023-10-09T08:51:51Z" level=info msg="Doing request number 1" logger.go:42: 08:51:51 | production/8-check-gaID | time="2023-10-09T08:51:51Z" level=warning msg="Found: false . Assert: true" logger.go:42: 08:51:51 | production/8-check-gaID | time="2023-10-09T08:51:51Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 08:51:59 | production/8-check-gaID | time="2023-10-09T08:51:59Z" level=info msg="Doing request number 2" logger.go:42: 08:51:59 | production/8-check-gaID | time="2023-10-09T08:51:59Z" level=info msg="Content found and asserted!" logger.go:42: 08:51:59 | production/8-check-gaID | time="2023-10-09T08:51:59Z" level=info msg="Success!" 
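Step 7-add-tracking-id applies add-tracking-id.yaml, whose contents are not captured in this log. Given that 8-check-gaID then finds MyTrackingId in the served UI, the file presumably sets the Jaeger UI tracking option on the CR along these lines (the exact field layout is an assumption based on the Jaeger UI's tracking configuration):

    # Hypothetical reconstruction of add-tracking-id.yaml, applied as in the log.
    cat <<EOF | kubectl apply -n "$NAMESPACE" -f -
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: production-ui
    spec:
      ui:
        options:
          tracking:
            gaID: MyTrackingId
    EOF

The two "condition of the test function was not accomplished" warnings before the success above are just the operator still rolling out the query pod with the new UI config; request number 2 lands after the rollout and finds the tracking ID.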
logger.go:42: 08:51:59 | production/8-check-gaID | test step completed 8-check-gaID logger.go:42: 08:51:59 | production | production events from ns kuttl-test-capital-skunk: logger.go:42: 08:51:59 | production | 2023-10-09 08:50:42 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8b69bfb9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw replicaset-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:50:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw Binding Scheduled Successfully assigned kuttl-test-capital-skunk/elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw to ip-10-0-21-71.ec2.internal default-scheduler logger.go:42: 08:51:59 | production | 2023-10-09 08:50:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw AddedInterface Add eth0 [10.128.2.93/23] from ovn-kubernetes logger.go:42: 08:51:59 | production | 2023-10-09 08:50:42 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcapitalskunkproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8b69bfb9 to 1 deployment-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:50:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:461ba613024688d9e0d310ea153118a9f4ccde38f9dda1e3f12f0c51590f0910" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:50:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:50:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:50:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:6375f305631524f9fadf07747c4693a3c58a669fe5a53c1813084d721230f2b2" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:50:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:50:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:50:58 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcapitalskunkproductionui-1-6f8bnstmw.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8g46b Binding Scheduled Successfully assigned kuttl-test-capital-skunk/production-ui-collector-7696dc959d-8g46b to ip-10-0-98-173.ec2.internal default-scheduler logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8g46b AddedInterface Add eth0 [10.129.2.103/23] from ovn-kubernetes logger.go:42: 08:51:59 | production | 2023-10-09 
08:51:09 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8g46b.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:285d31a2abc603282ff42c12e7e59a971505c0823f30d05286bd844d5739cd6c" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8g46b.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-collector-7696dc959d-8g46b.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal ReplicaSet.apps production-ui-collector-7696dc959d SuccessfulCreate Created pod: production-ui-collector-7696dc959d-8g46b replicaset-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-7696dc959d to 1 deployment-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s Binding Scheduled Successfully assigned kuttl-test-capital-skunk/production-ui-query-c786f94c-fbn7s to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s AddedInterface Add eth0 [10.131.0.74/23] from ovn-kubernetes logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal ReplicaSet.apps production-ui-query-c786f94c SuccessfulCreate Created pod: production-ui-query-c786f94c-fbn7s replicaset-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:09 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-c786f94c to 1 deployment-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:10 +0000 UTC Warning Pod production-ui-collector-7696dc959d-8g46b.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:10 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:10 +0000 UTC Normal Pod 
production-ui-query-c786f94c-fbn7s.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:10 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:10 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:10 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:51:59 | production | 2023-10-09 08:51:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:51:59 | production | 2023-10-09 08:51:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:51:59 | production | 2023-10-09 08:51:25 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n Binding Scheduled Successfully assigned kuttl-test-capital-skunk/production-ui-query-558d7646b6-4mf9n to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 08:51:59 | production | 2023-10-09 08:51:25 +0000 UTC Normal ReplicaSet.apps production-ui-query-558d7646b6 SuccessfulCreate Created pod: production-ui-query-558d7646b6-4mf9n replicaset-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:25 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:25 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:25 +0000 UTC Normal Pod production-ui-query-c786f94c-fbn7s.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:25 +0000 UTC Normal ReplicaSet.apps production-ui-query-c786f94c SuccessfulDelete Deleted pod: production-ui-query-c786f94c-fbn7s replicaset-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:25 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-c786f94c to 0 from 1 deployment-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:25 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-558d7646b6 to 1 deployment-controller logger.go:42: 08:51:59 | production | 
2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n AddedInterface Add eth0 [10.131.0.75/23] from ovn-kubernetes logger.go:42: 08:51:59 | production | 2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d1b7a75e7c51c5ab3c446705a1b2f10b375d006d7411b191e5d017ae841eb0f1" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:26 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:41 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:41 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:41 +0000 UTC Normal Pod production-ui-query-558d7646b6-4mf9n.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:41 +0000 UTC Normal ReplicaSet.apps production-ui-query-558d7646b6 SuccessfulDelete Deleted pod: production-ui-query-558d7646b6-4mf9n replicaset-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:41 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-558d7646b6 to 0 from 1 deployment-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm Binding Scheduled Successfully assigned kuttl-test-capital-skunk/production-ui-query-7958b5994-qhrwm to ip-10-0-31-95.ec2.internal default-scheduler logger.go:42: 
08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm AddedInterface Add eth0 [10.131.0.76/23] from ovn-kubernetes logger.go:42: 08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:7104a134e167e8a896a01c0c23138e2d4bdbd3aeca76748f766e7d4ea4c726a8" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:3f7623cd103dc76d393d956561a2db0b30dc49f4c9a09b457eeb152afe313276" already present on machine kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal ReplicaSet.apps production-ui-query-7958b5994 SuccessfulCreate Created pod: production-ui-query-7958b5994-qhrwm replicaset-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:42 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-7958b5994 to 1 deployment-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:52 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:52 +0000 UTC Normal Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:52 +0000 UTC Warning Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-agent} Unhealthy Readiness probe failed: Get "http://10.131.0.76:14271/": dial tcp 10.131.0.76:14271: connect: connection refused kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:52 +0000 UTC Warning Pod production-ui-query-7958b5994-qhrwm.spec.containers{jaeger-query} Unhealthy Readiness probe failed: Get "http://10.131.0.76:16687/": dial tcp 10.131.0.76:16687: connect: connection refused kubelet logger.go:42: 08:51:59 | production | 2023-10-09 08:51:52 +0000 UTC Normal ReplicaSet.apps production-ui-query-7958b5994 SuccessfulDelete Deleted pod: production-ui-query-7958b5994-qhrwm replicaset-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:52 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-7958b5994 to 0 from 1 deployment-controller logger.go:42: 08:51:59 | production | 2023-10-09 08:51:53 +0000 UTC Normal Pod production-ui-query-669448b567-kgkkz Binding Scheduled Successfully assigned 
=== CONT kuttl/harness/artifacts
logger.go:42: 08:52:05 | artifacts | Creating namespace: kuttl-test-still-tomcat
logger.go:42: 08:52:06 | artifacts | artifacts events from ns kuttl-test-still-tomcat:
logger.go:42: 08:52:06 | artifacts | Deleting namespace: kuttl-test-still-tomcat
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (134.70s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/allinone (39.44s)
--- PASS: kuttl/harness/production (89.17s)
--- PASS: kuttl/harness/artifacts (6.05s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml
time="2023-10-09T08:52:12Z" level=debug msg="Setting a new name for the test suites"
time="2023-10-09T08:52:12Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-10-09T08:52:12Z" level=debug msg="normalizing test case names"
time="2023-10-09T08:52:12Z" level=debug msg="ui/allinone -> ui_allinone"
time="2023-10-09T08:52:12Z" level=debug msg="ui/production -> ui_production"
time="2023-10-09T08:52:12Z" level=debug msg="ui/artifacts -> ui_artifacts"
+---------------+--------+
|     NAME      | RESULT |
+---------------+--------+
| ui_allinone   | passed |
| ui_production | passed |
| ui_artifacts  | passed |
+---------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 2 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/ui.xml
+ '[' 0 -gt 0 ']'
+ '[' 2 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
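That trace is the pass/fail gate for the whole run: each JUnit report under $ARTIFACT_DIR is grepped for the string 'failure message', count tracks how many reports contain at least one (examples.xml and streaming.xml here), and the job stays green as long as no more than three suites have failures; with count=2 the final comparison fails and the script exits 0. A minimal reconstruction of that loop, assuming the over-threshold branch simply exits non-zero:

  count=0
  for file in $ARTIFACT_DIR/*; do
    # grep -c prints 0 and exits 1 when nothing matches, so discard its status
    failures=$(grep -c 'failure message' "$file") || true
    if [ "$failures" -gt 0 ]; then
      count=$((count + 1))
    fi
  done
  # Tolerate up to three failing suites before marking the job red (assumed branch).
  if [ "$count" -gt 3 ]; then
    exit 1
  fi
  exit 0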
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=upgrade
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/upgrade.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-upgrade
make[2]: Entering directory '/tmp/jaeger-tests'
make docker JAEGER_VERSION=1.49.1 IMG="quay.io//jaeger-operator:next"
make[3]: Entering directory '/tmp/jaeger-tests'
[ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.49.0" --build-arg=JAEGER_VERSION=1.49.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2023-10-09T08:52:12Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" .
make[3]: Leaving directory '/tmp/jaeger-tests'
touch build-e2e-upgrade-image
SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.49.0" JAEGER_OPERATOR_VERSION="1.49.0" JAEGER_VERSION="1.49.0" ./tests/e2e/upgrade/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 86m Cluster version is 4.14.0-0.nightly-2023-10-06-234925'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.14.0-0.nightly-2023-10-06-234925 True False 86m Cluster version is 4.14.0-0.nightly-2023-10-06-234925' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/upgrade/render.sh
++ export SUITE_DIR=./tests/e2e/upgrade
++ SUITE_DIR=./tests/e2e/upgrade
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/upgrade
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
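Before kuttl runs, each render.sh stages its suite into a throwaway _build directory and renders the harness config from a shared template. Stripped of the xtrace noise, the staging seen above amounts roughly to the following; treating CRD_DIR as an input the template consumes is an inference from the trace:

  cd "$SUITE_DIR"                 # e.g. ./tests/e2e/upgrade
  rm -rf _build && mkdir _build
  # Copy every test directory except _build itself into the staging area.
  find -maxdepth 1 -type d ! -wholename . ! -wholename ./_build \
    | xargs -I '{}' cp -r '{}' _build
  cd _build
  # Render the kuttl harness config from the shared template.
  /tmp/jaeger-tests/bin/gomplate \
    -f ../../../templates/kuttl-test.yaml.template \
    -o ./kuttl-test.yaml
  mkdir -p artifacts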
+ export JAEGER_NAME
+ '[' true = true ']'
+ skip_test upgrade 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade
+ warning 'upgrade: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m'
WAR: upgrade: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test upgrade-from-latest-release 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade-from-latest-release
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade-from-latest-release
+ warning 'upgrade-from-latest-release: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m'
WAR: upgrade-from-latest-release: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
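On OpenShift the two upgrade scenarios are skipped by deleting their rendered folders before kuttl can discover them. From the trace, skip_test takes a test name and a reason, checks that it is already inside the suite's _build directory, removes the test directory, and prints the yellow WAR line. A sketch of that shape; the two guarded branches are never taken in this run, so their bodies are assumptions:

  skip_test() {
    if [ "$#" -ne 2 ]; then
      exit 1          # assumption: argument-count guard, not exercised here
    fi
    test_name=$1
    message=$2
    if [ "$(basename "$(pwd)")" != "_build" ]; then
      cd _build       # assumption: the trace only shows the check, not the fix-up
    fi
    rm -rf "$test_name"              # drop the rendered test so kuttl never sees it
    warning "$test_name: $message"   # warning() emits the WAR: lines seen above
  }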
+ echo 'Running upgrade E2E tests'
Running upgrade E2E tests
+ cd tests/e2e/upgrade/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-1500832312
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-4w0gwlt0-a5461.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 1 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT kuttl/harness/artifacts
logger.go:42: 08:52:13 | artifacts | Creating namespace: kuttl-test-hopeful-racer
logger.go:42: 08:52:13 | artifacts | artifacts events from ns kuttl-test-hopeful-racer:
logger.go:42: 08:52:13 | artifacts | Deleting namespace: kuttl-test-hopeful-racer
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (5.97s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.93s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2023-10-09T08:52:19Z" level=debug msg="Setting a new name for the test suites"
time="2023-10-09T08:52:19Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-10-09T08:52:19Z" level=debug msg="normalizing test case names"
time="2023-10-09T08:52:19Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 2 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/ui.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/upgrade.xml
+ '[' 0 -gt 0 ']'
+ '[' 2 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
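Taken together, every suite in this log follows the same driver skeleton. A condensed, non-verbatim reconstruction of hack/run-e2e-test-suite.sh as it behaves in these traces; argument validation, the OLM skip, and the failure gate shown earlier are simplified or omitted:

  #!/bin/bash
  # Reconstruction from the xtrace output above; not the actual script.
  test_suite_name=$1
  use_kind_cluster=$2
  jaeger_olm=$3

  root_dir=/tmp/jaeger-tests/hack/../
  reports_dir=/logs/artifacts

  make prepare-e2e-tests USE_KIND_CLUSTER="$use_kind_cluster" JAEGER_OLM="$jaeger_olm"
  mkdir -p "$reports_dir"
  rm -f "$reports_dir/$test_suite_name.xml"
  cd "$root_dir"
  "$root_dir"/hack/install/install-kuttl.sh
  make "render-e2e-tests-$test_suite_name"   # stages the suite into _build via render.sh

  cd "tests/e2e/$test_suite_name/_build"
  set +e                                     # kuttl may fail; capture its status instead of aborting
  "$root_dir"/bin/kubectl-kuttl test --report xml
  exit_code=$?
  set -e

  # Normalize the kuttl JUnit report for the CI dashboards.
  go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
  junitcli --suite-name "$test_suite_name" --report \
    --output "$reports_dir/$test_suite_name.xml" ./artifacts/kuttl-report.xml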