Installing kuttl
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64
KUBECONFIG file is: /tmp/kubeconfig-282053367
for suite in elasticsearch examples generate miscellaneous sidecar streaming ui upgrade; do \
	make run-e2e-tests-$suite ; \
done
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=elasticsearch
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/elasticsearch.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-elasticsearch
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
SKIP_ES_EXTERNAL=true \
KAFKA_VERSION=3.6.0 \
SKIP_KAFKA=false \
./tests/e2e/elasticsearch/render.sh
+++ kubectl get clusterversion
++ output='NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.15.0-0.nightly-2023-12-09-012410   True        False         8m57s   Cluster version is 4.15.0-0.nightly-2023-12-09-012410'
++ IS_OPENSHIFT=false
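install-kuttl.sh follows the usual pin-and-skip installer pattern: keep a fixed release under the repo's bin/ directory and only download when the pinned version is missing. A minimal sketch of that pattern, reconstructed from the trace (the retry loop and the version probe are assumptions; the URL and paths are the ones logged above):

  #!/bin/bash
  # Sketch of a pinned-binary installer in the style of install-kuttl.sh.
  VERSION=0.15.0
  BIN=/tmp/jaeger-tests/bin/kubectl-kuttl

  echo "Installing kuttl"
  # Idempotence: skip the download when the pinned version is already on disk.
  if [ -x "$BIN" ] && "$BIN" version 2>/dev/null | grep -q "$VERSION"; then
      echo "kubectl-kuttl $VERSION is installed already"
      exit 0
  fi

  mkdir -p "$(dirname "$BIN")"
  for try in 0 1 2; do
      echo "Try $try..."
      curl -sLo "$BIN" "https://github.com/kudobuilder/kuttl/releases/download/v${VERSION}/kubectl-kuttl_${VERSION}_linux_x86_64" && break
  done
  chmod +x "$BIN"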
++ '[' '!' -z 'NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.15.0-0.nightly-2023-12-09-012410   True        False         8m57s   Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/elasticsearch/render.sh
++ export SUITE_DIR=./tests/e2e/elasticsearch
++ SUITE_DIR=./tests/e2e/elasticsearch
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
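The version_le call above is the stock sort -V idiom: print both versions, sort them in natural version order, and test which one comes out first. The suite's version_ge (used further down to gate the managed-Elasticsearch tests) is the same trick with a reverse sort. A self-contained reconstruction of both helpers as the trace implies them:

  # version_le A B: true when A <= B (A sorts first in natural version order)
  version_le() {
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
  }

  # version_ge A B: true when A >= B (A sorts first in reverse order)
  version_ge() {
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" == "$1"
  }

  version_le 3.6.0 0.25.0 || echo "3.6.0 is newer than 0.25.0"  # head yields 0.25.0, test fails
  version_ge 5.8.0 5.4    && echo "ESO 5.8.0 satisfies >= 5.4"  # head yields 5.8.0, test passes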
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ start_test es-from-aio-to-production
+ '[' 1 -ne 1 ']'
+ test_name=es-from-aio-to-production
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-from-aio-to-production'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m'
Rendering files for test es-from-aio-to-production
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-from-aio-to-production
+ cd es-from-aio-to-production
+ jaeger_name=my-jaeger
+ render_install_jaeger my-jaeger allInOne 00
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=allInOne
+ test_step=00
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test my-jaeger true 01
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 03
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=03
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml
+ [[ true = true ]]
+ [[ true = true ]]
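Every render step above is the same mechanism: export a handful of variables, then let gomplate substitute them into a YAML template. A toy end-to-end example of that pattern (the template body here is illustrative, not the repo's actual allinone-jaeger-install.yaml.template):

  export JAEGER_NAME=my-jaeger

  # Hypothetical stand-in for a Jaeger install template
  cat > /tmp/demo-install.yaml.template <<'EOF'
  apiVersion: jaegertracing.io/v1
  kind: Jaeger
  metadata:
    name: {{ env.Getenv "JAEGER_NAME" }}
  EOF

  /tmp/jaeger-tests/bin/gomplate -f /tmp/demo-install.yaml.template -o ./00-install.yaml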
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml
+ render_smoke_test my-jaeger true 04
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=04
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test es-increasing-replicas
+ '[' 1 -ne 1 ']'
+ test_name=es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-increasing-replicas'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m'
Rendering files for test es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production
+ '[' es-from-aio-to-production '!=' _build ']'
+ cd ..
+ mkdir -p es-increasing-replicas
+ cd es-increasing-replicas
+ jaeger_name=simple-prod
+ '[' true = true ']'
+ jaeger_deployment_mode=production_autoprovisioned
+ render_install_jaeger simple-prod production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ cp ./01-install.yaml ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml
+ cp ./01-assert.yaml ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml
+ render_smoke_test simple-prod true 03
+ '[' 3 -ne 3 ']'
+ jaeger=simple-prod
+ is_secured=true
+ test_step=03
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ cp ./02-install.yaml ./04-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml
+ '[' true = true ']'
+ skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas
+ '[' es-increasing-replicas '!=' _build ']'
+ cd ..
+ rm -rf es-index-cleaner-upstream
+ warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m'
WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true
+ '[' true = true ']'
+ es_index_cleaner -autoprov production_autoprovisioned
+ '[' 2 -ne 2 ']'
+ postfix=-autoprov
+ jaeger_deployment_strategy=production_autoprovisioned
+ start_test es-index-cleaner-autoprov
+ '[' 1 -ne 1 ']'
+ test_name=es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-index-cleaner-autoprov'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m'
Rendering files for test es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-index-cleaner-autoprov
+ cd es-index-cleaner-autoprov
+ jaeger_name=test-es-index-cleaner-with-prefix
+ cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner
+ secured_es_connection=false
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_managed_es ']'
+ ELASTICSEARCH_URL=https://elasticsearch
+ secured_es_connection=true
+ cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md .
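The es-increasing-replicas steps just rendered show the suite's copy-then-patch idiom: step 02 is step 01 plus yq in-place edits, with the install file getting the new replica counts and the assert file getting the state kuttl should wait for. Condensed from the trace:

  cp ./01-install.yaml ./02-install.yaml
  /tmp/jaeger-tests/bin/yq e -i '.spec.collector.replicas=2' ./02-install.yaml
  /tmp/jaeger-tests/bin/yq e -i '.spec.query.replicas=2' ./02-install.yaml

  cp ./01-assert.yaml ./02-assert.yaml
  /tmp/jaeger-tests/bin/yq e -i '.spec.replicas=2' ./02-assert.yaml
  /tmp/jaeger-tests/bin/yq e -i '.status.readyReplicas=2' ./02-assert.yaml

kuttl then treats the patched assert as a wait condition: step 02 only passes once the asserted objects actually report readyReplicas: 2.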
+ render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=test-es-index-cleaner-with-prefix
+ JAEGER_NAME=test-es-index-cleaner-with-prefix
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml
+ render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02
+ '[' 6 -ne 6 ']'
+ jaeger=test-es-index-cleaner-with-prefix
+ is_secured=true
+ number_of_spans=5
+ job_number=00
+ ensure_reported_spans=true
+ test_step=02
+ export JAEGER_NAME=test-es-index-cleaner-with-prefix
+ JAEGER_NAME=test-es-index-cleaner-with-prefix
+ export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export DAYS=5
+ DAYS=5
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query
+ JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ sed 's~enabled: false~enabled: true~gi' ./01-install.yaml
+ CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml
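Later steps of es-index-cleaner-autoprov are derived from the step-01 install: sed flips the esIndexCleaner switch, and gomplate re-renders the result as 05-install.yaml. xtrace does not echo redirections, so where the sed output lands is not visible in the log; a plausible reading (the target filename is an assumption):

  # Presumed shape of the derivation: toggle the cleaner on for a later step
  sed 's~enabled: false~enabled: true~gi' ./01-install.yaml > ./03-install.yaml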
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ job_number=00
+ test_step=06
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ sed 's/\\/\\\\/g'
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=test-es-index-cleaner-with-prefix-curator
+ JOB_NUMBER=00
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml
+ JOB_NUMBER=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml
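escape_command exists because these CMD_PARAMETERS strings travel through gomplate: each backslash in a regex like jaeger-span-\d{4} has to be doubled to survive template rendering. Reconstructed from the trace:

  # Reconstructed: double every backslash before handing the string to gomplate
  escape_command() {
      command="$1"
      export CMD_PARAMETERS="$(echo "$command" | sed 's/\\/\\\\/g')"
  }

  escape_command "'--pattern', 'jaeger-span-\d{4}-\d{2}-\d{2}', '--assert-count-indices', '0',"
  echo "$CMD_PARAMETERS"
  # '--pattern', 'jaeger-span-\\d{4}-\\d{2}-\\d{2}', '--assert-count-indices', '0',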
+ '[' true = true ']'
+ get_elasticsearch_openshift_operator_version
+ export ESO_OPERATOR_VERSION
+ '[' true = true ']'
++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
+ properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}'
+ '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}' ']'
++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}'
++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
+ ESO_OPERATOR_VERSION=5.8.0
++ version_ge 5.8.0 5.4
+++ echo 5.8.0 5.4
+++ tr ' ' '\n'
+++ sort -rV
+++ head -n 1
++ test 5.8.0 == 5.8.0
+ '[' -n '' ']'
+ skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov
+ '[' es-index-cleaner-autoprov '!=' _build ']'
+ cd ..
+ rm -rf es-index-cleaner-managed
+ warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ start_test es-multiinstance
+ '[' 1 -ne 1 ']'
+ test_name=es-multiinstance
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-multiinstance'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m'
Rendering files for test es-multiinstance
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-multiinstance
+ cd es-multiinstance
+ jaeger_name=instance-1
+ render_install_jaeger instance-1 production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=instance-1
+ JAEGER_NAME=instance-1
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml
+ '[' true = true ']'
+ skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance
+ '[' es-multiinstance '!=' _build ']'
+ cd ..
+ rm -rf es-rollover-upstream
+ warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m'
WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true
+ '[' true = true ']'
+ es_rollover -autoprov production_autoprovisioned
+ '[' 2 -ne 2 ']'
+ postfix=-autoprov
+ jaeger_deployment_strategy=production_autoprovisioned
+ start_test es-rollover-autoprov
+ '[' 1 -ne 1 ']'
+ test_name=es-rollover-autoprov
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-rollover-autoprov'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m'
Rendering files for test es-rollover-autoprov
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-rollover-autoprov
+ cd es-rollover-autoprov
+ cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md .
+ jaeger_name=my-jaeger
+ secured_es_connection=false
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_managed_es ']'
+ ELASTICSEARCH_URL=https://elasticsearch
+ secured_es_connection=true
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_report_spans my-jaeger true 2 00 true 02
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=00
+ ensure_reported_spans=true
+ test_step=02
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ job_number=00
+ test_step=03
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=00
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml
+ JOB_NUMBER=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ job_number=01
+ test_step=04
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=01
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml
+ JOB_NUMBER=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml
+ render_report_spans my-jaeger true 2 02 true 06
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=02
+ ensure_reported_spans=true
+ test_step=06
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=02
+ JOB_NUMBER=02
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ job_number=02
+ test_step=07
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=02
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml
+ JOB_NUMBER=02
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ job_number=03
+ test_step=08
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=03
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml
+ JOB_NUMBER=03
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ job_number=04
+ test_step=09
+ escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=04
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml
+ JOB_NUMBER=04
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml
+ render_report_spans my-jaeger true 2 03 true 10
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=03
+ ensure_reported_spans=true
+ test_step=10
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=03
+ JOB_NUMBER=03
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ CRONJOB_NAME=my-jaeger-es-rollover
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml
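11-wait-rollover.yaml wraps WAIT_CRONJOB_PROGRAM (a Go helper) to block until the rollover CronJob has actually fired. A rough bash equivalent of what that step waits for, polling the CronJob's status (a sketch, not the repo's implementation):

  # Hypothetical stand-in for the wait-cronjob helper
  CRONJOB_NAME=my-jaeger-es-rollover
  until [ -n "$(kubectl get cronjob "$CRONJOB_NAME" -o jsonpath='{.status.lastSuccessfulTime}')" ]; do
      sleep 5
  done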
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'','
+ job_number=05
+ test_step=11
+ escape_command ''\''--name'\'', '\''jaeger-span-000002'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-000002'\'','
++ echo ''\''--name'\'', '\''jaeger-span-000002'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=05
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml
+ JOB_NUMBER=05
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ job_number=06
+ test_step=12
+ escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=06
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml
+ JOB_NUMBER=06
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml
+ '[' true = true ']'
+ get_elasticsearch_openshift_operator_version
+ export ESO_OPERATOR_VERSION
+ '[' true = true ']'
++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
+ properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}'
+ '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}' ']'
++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}'
++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
+ ESO_OPERATOR_VERSION=5.8.0
++ version_ge 5.8.0 5.4
+++ echo 5.8.0 5.4
+++ tr ' ' '\n'
+++ sort -rV
+++ head -n 1
++ test 5.8.0 == 5.8.0
+ '[' -n '' ']'
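This version gate (run once per managed-ES test) reads the operator's version straight off the OLM properties annotation stamped on the operator pod: a kubectl jsonpath fetch, then a yq select over the decoded JSON. The same two commands in isolation:

  # Extract the installed elasticsearch-operator version from its OLM annotation
  properties="$(kubectl get pods -l name=elasticsearch-operator --all-namespaces \
      -o=jsonpath='{.items[0].metadata.annotations.operatorframework\.io/properties}')"
  echo "$properties" | /tmp/jaeger-tests/bin/yq e -P \
      '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
  # -> 5.8.0 on this cluster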
+ skip_test es-rollover-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov
+ '[' es-rollover-autoprov '!=' _build ']'
+ cd ..
+ rm -rf es-rollover-managed
+ warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ skip_test es-spark-dependencies 'This test is not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=es-spark-dependencies
+ message='This test is not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ rm -rf es-spark-dependencies
+ warning 'es-spark-dependencies: This test is not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m'
WAR: es-spark-dependencies: This test is not supported in OpenShift
+ [[ true = true ]]
+ [[ false = false ]]
+ start_test es-streaming-autoprovisioned
+ '[' 1 -ne 1 ']'
+ test_name=es-streaming-autoprovisioned
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-streaming-autoprovisioned'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-streaming-autoprovisioned\e[0m'
Rendering files for test es-streaming-autoprovisioned
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-streaming-autoprovisioned
+ cd es-streaming-autoprovisioned
+ jaeger_name=auto-provisioned
+ render_assert_kafka true auto-provisioned 00
+ '[' 3 -ne 3 ']'
+ autoprovisioned=true
+ cluster_name=auto-provisioned
+ test_step=00
+ '[' true = true ']'
+ is_kafka_minimal_enabled
+ namespaces=(observability openshift-operators openshift-distributed-tracing)
+ for i in "${namespaces[@]}"
++ kubectl get pods -n observability -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=true
+ '[' true == true ']'
+ return 0
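is_kafka_minimal_enabled walks the namespaces a jaeger-operator might live in and checks whether it runs with KAFKA-PROVISIONING-MINIMAL=true, which decides how many Kafka replicas the test should assert. Reconstructed from the trace:

  # Reconstructed: detect a jaeger-operator running with minimal Kafka provisioning
  is_kafka_minimal_enabled() {
      namespaces=(observability openshift-operators openshift-distributed-tracing)
      for i in "${namespaces[@]}"; do
          enabled=$(kubectl get pods -n "$i" -l name=jaeger-operator -o yaml \
              | /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value')
          [ "$enabled" == true ] && return 0
      done
      return 1
  }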
+ replicas=1
+ CLUSTER_NAME=auto-provisioned
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml
++ expr 00 + 1
+ CLUSTER_NAME=auto-provisioned
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml
++ expr 00 + 2
+ CLUSTER_NAME=auto-provisioned
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml
+ render_smoke_test auto-provisioned true 04
+ '[' 3 -ne 3 ']'
+ jaeger=auto-provisioned
+ is_secured=true
+ test_step=04
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443
+ JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268
+ export JAEGER_NAME=auto-provisioned
+ JAEGER_NAME=auto-provisioned
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running elasticsearch E2E tests'
Running elasticsearch E2E tests
+ cd tests/e2e/elasticsearch/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-282053367
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 600 seconds for each step
    harness.go:372: testsuite: . has 8 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN   kuttl/harness/es-from-aio-to-production
=== PAUSE kuttl/harness/es-from-aio-to-production
=== RUN   kuttl/harness/es-increasing-replicas
=== PAUSE kuttl/harness/es-increasing-replicas
=== RUN   kuttl/harness/es-index-cleaner-autoprov
=== PAUSE kuttl/harness/es-index-cleaner-autoprov
=== RUN   kuttl/harness/es-multiinstance
=== PAUSE kuttl/harness/es-multiinstance
=== RUN   kuttl/harness/es-rollover-autoprov
=== PAUSE kuttl/harness/es-rollover-autoprov
=== RUN   kuttl/harness/es-simple-prod
=== PAUSE kuttl/harness/es-simple-prod
=== RUN   kuttl/harness/es-streaming-autoprovisioned
=== PAUSE kuttl/harness/es-streaming-autoprovisioned
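The harness behavior above (8 tests discovered in ., a 600-second timeout per step, artifacts collection) comes from the kuttl-test.yaml rendered earlier from templates/kuttl-test.yaml.template; --report xml on the command line adds the JUnit-style report. A plausible shape for the rendered file, assuming standard kuttl TestSuite fields:

  # Sketch of the rendered kuttl-test.yaml (field values inferred from the run)
  cat > kuttl-test.yaml <<'EOF'
  apiVersion: kuttl.dev/v1beta1
  kind: TestSuite
  timeout: 600
  testDirs:
    - .
  artifactsDir: ./artifacts
  EOF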
=== CONT  kuttl/harness/artifacts
logger.go:42: 06:55:12 | artifacts | Creating namespace: kuttl-test-profound-snake
logger.go:42: 06:55:12 | artifacts | artifacts events from ns kuttl-test-profound-snake:
logger.go:42: 06:55:12 | artifacts | Deleting namespace: kuttl-test-profound-snake
=== CONT  kuttl/harness/es-multiinstance
logger.go:42: 06:55:18 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:55:18 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:55:18 | es-multiinstance | Creating namespace: kuttl-test-brave-weasel
logger.go:42: 06:55:18 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace
logger.go:42: 06:55:18 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true]
logger.go:42: 06:55:18 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace
logger.go:42: 06:55:18 | es-multiinstance/1-install | starting test step 1-install
logger.go:42: 06:55:18 | es-multiinstance/1-install | Jaeger:kuttl-test-brave-weasel/instance-1 created
logger.go:42: 06:56:16 | es-multiinstance/1-install | test step completed 1-install
logger.go:42: 06:56:16 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace
logger.go:42: 06:56:16 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test]
logger.go:42: 06:56:16 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created
logger.go:42: 06:56:16 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace
logger.go:42: 06:56:16 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance
logger.go:42: 06:56:16 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test]
logger.go:42: 06:56:18 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created
logger.go:42: 06:56:18 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000]
logger.go:42: 06:57:04 | es-multiinstance/3-create-second-instance | assert is valid
logger.go:42: 06:57:04 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance
logger.go:42: 06:57:04 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets
logger.go:42: 06:57:04 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1]
logger.go:42: 06:57:04 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2]
logger.go:42: 06:57:04 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0]
logger.go:42: 06:57:04 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets
logger.go:42: 06:57:04 | es-multiinstance/5-delete | starting test step 5-delete
logger.go:42: 06:57:04 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false]
logger.go:42: 06:57:04 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted
logger.go:42: 06:57:04 | es-multiinstance/5-delete | test step completed 5-delete
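Each "running command" line above comes from a numbered kuttl TestStep file in the test directory. Step 3, for instance, was rendered from 03-create-second-instance.yaml.template and, judging by the two commands logged, looks roughly like:

  # Reconstructed from the log; the real file is rendered by gomplate
  cat > 03-create-second-instance.yaml <<'EOF'
  apiVersion: kuttl.dev/v1beta1
  kind: TestStep
  commands:
    - script: kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test
    - script: /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000
  EOF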
logger.go:42: 06:57:04 | es-multiinstance | es-multiinstance events from ns kuttl-test-brave-weasel:
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2 Binding Scheduled Successfully assigned kuttl-test-brave-weasel/elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2 AddedInterface Add eth0 [10.129.2.17/23] from ovn-kubernetes
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:26 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc477d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2 replicaset-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:26 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestbraveweaselinstance1-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc477d to 1 deployment-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{elasticsearch} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" in 7.715s (7.715s including waiting) kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" in 3.815s (3.815s including waiting) kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:38 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:46 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:55:52 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestbraveweaselinstance1-1-66ddc47742kz2.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:02 +0000 UTC Normal Pod instance-1-collector-657676cb4c-psdns Binding Scheduled Successfully assigned kuttl-test-brave-weasel/instance-1-collector-657676cb4c-psdns to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:02 +0000 UTC Normal ReplicaSet.apps instance-1-collector-657676cb4c SuccessfulCreate Created pod: instance-1-collector-657676cb4c-psdns replicaset-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:02 +0000 UTC Normal Deployment.apps instance-1-collector ScalingReplicaSet Scaled up replica set instance-1-collector-657676cb4c to 1 deployment-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:03 +0000 UTC Normal Pod instance-1-collector-657676cb4c-psdns AddedInterface Add eth0 [10.131.0.18/23] from ovn-kubernetes
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:03 +0000 UTC Normal Pod instance-1-collector-657676cb4c-psdns.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:03 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs Binding Scheduled Successfully assigned kuttl-test-brave-weasel/instance-1-query-55dbcf849c-fwmxs to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:03 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs AddedInterface Add eth0 [10.128.2.25/23] from ovn-kubernetes
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:03 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:03 +0000 UTC Normal ReplicaSet.apps instance-1-query-55dbcf849c SuccessfulCreate Created pod: instance-1-query-55dbcf849c-fwmxs replicaset-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:03 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-55dbcf849c to 1 deployment-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:07 +0000 UTC Normal Pod instance-1-collector-657676cb4c-psdns.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" in 4.688s (4.688s including waiting) kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:07 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" in 4.147s (4.147s including waiting) kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:07 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:07 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:07 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:08 +0000 UTC Normal Pod instance-1-collector-657676cb4c-psdns.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:08 +0000 UTC Normal Pod instance-1-collector-657676cb4c-psdns.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:09 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" in 1.878s (1.878s including waiting) kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:09 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:09 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:09 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:14 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" in 4.483s (4.483s including waiting) kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:14 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:14 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:26 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:26 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:26 +0000 UTC Normal Pod instance-1-query-55dbcf849c-fwmxs.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:26 +0000 UTC Normal ReplicaSet.apps instance-1-query-55dbcf849c SuccessfulDelete Deleted pod: instance-1-query-55dbcf849c-fwmxs replicaset-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:26 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled down replica set instance-1-query-55dbcf849c to 0 from 1 deployment-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:27 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh Binding Scheduled Successfully assigned kuttl-test-brave-weasel/instance-1-query-c4bb574f6-s4blh to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:27 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh AddedInterface Add eth0 [10.131.0.19/23] from ovn-kubernetes
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:27 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:27 +0000 UTC Normal ReplicaSet.apps instance-1-query-c4bb574f6 SuccessfulCreate Created pod: instance-1-query-c4bb574f6-s4blh replicaset-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:27 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-c4bb574f6 to 1 deployment-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:29 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" in 1.523s (1.523s including waiting) kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:29 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:29 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:29 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:32 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:33 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" in 4.034s (4.034s including waiting) kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:33 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:33 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:33 +0000 UTC Normal Pod
instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" kubelet logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:35 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" in 2.286s (2.286s including waiting) kubelet logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:36 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:56:36 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod instance-1-collector-657676cb4c-psdns horizontal-pod-autoscaler logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:02 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:02 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:02 +0000 UTC Normal Pod instance-1-query-c4bb574f6-s4blh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:02 +0000 UTC Normal ReplicaSet.apps instance-1-query-c4bb574f6 SuccessfulDelete Deleted pod: instance-1-query-c4bb574f6-s4blh replicaset-controller logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:02 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled down replica set instance-1-query-c4bb574f6 to 0 from 1 deployment-controller logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:03 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb Binding Scheduled Successfully assigned kuttl-test-brave-weasel/instance-1-query-55dbcf849c-cr2lb to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:03 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb AddedInterface Add eth0 [10.131.0.22/23] from ovn-kubernetes logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:03 +0000 UTC Normal Pod 
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:03 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:03 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:03 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:03 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:03 +0000 UTC Normal ReplicaSet.apps instance-1-query-55dbcf849c SuccessfulCreate Created pod: instance-1-query-55dbcf849c-cr2lb replicaset-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:03 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-55dbcf849c to 1 from 0 deployment-controller
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:04 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:04 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:04 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:04 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:57:04 | es-multiinstance | 2023-12-11 06:57:04 +0000 UTC Normal Pod instance-1-query-55dbcf849c-cr2lb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:57:04 | es-multiinstance | Deleting namespace: kuttl-test-brave-weasel
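
Editor's aside on the HorizontalPodAutoscaler warnings in the es-multiinstance events above: one of them names the likely cause directly ("missing request for memory in container jaeger-collector"), since the HPA cannot compute utilization for a container that declares no resource request for that metric. A hedged sketch, not part of the recorded run, of how one could confirm it (deployment and namespace names taken from the events above):

    # Print the resource requests on the collector pod template; an empty or
    # partial result would explain the FailedGetResourceMetric and
    # FailedComputeMetricsReplicas events emitted by the horizontal-pod-autoscaler.
    kubectl get deployment instance-1-collector -n kuttl-test-brave-weasel \
      -o jsonpath='{.spec.template.spec.containers[*].resources.requests}'
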
=== CONT  kuttl/harness/es-streaming-autoprovisioned
logger.go:42: 06:57:11 | es-streaming-autoprovisioned | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:57:11 | es-streaming-autoprovisioned | Creating namespace: kuttl-test-full-newt
logger.go:42: 06:57:11 | es-streaming-autoprovisioned/0-install | starting test step 0-install
logger.go:42: 06:57:11 | es-streaming-autoprovisioned/0-install | Jaeger:kuttl-test-full-newt/auto-provisioned created
logger.go:42: 06:58:27 | es-streaming-autoprovisioned/0-install | test step completed 0-install
logger.go:42: 06:58:27 | es-streaming-autoprovisioned/1- | starting test step 1-
logger.go:42: 06:58:58 | es-streaming-autoprovisioned/1- | test step completed 1-
logger.go:42: 06:58:58 | es-streaming-autoprovisioned/2- | starting test step 2-
logger.go:42: 06:59:30 | es-streaming-autoprovisioned/2- | test step completed 2-
logger.go:42: 06:59:30 | es-streaming-autoprovisioned/3- | starting test step 3-
logger.go:42: 06:59:38 | es-streaming-autoprovisioned/3- | test step completed 3-
logger.go:42: 06:59:38 | es-streaming-autoprovisioned/4-smoke-test | starting test step 4-smoke-test
logger.go:42: 06:59:38 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provisioned /dev/null]
logger.go:42: 06:59:40 | es-streaming-autoprovisioned/4-smoke-test | Warning: resource jaegers/auto-provisioned is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 06:59:48 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 06:59:48 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 06:59:49 | es-streaming-autoprovisioned/4-smoke-test | job.batch/report-span created
logger.go:42: 06:59:49 | es-streaming-autoprovisioned/4-smoke-test | job.batch/check-span created
logger.go:42: 07:00:05 | es-streaming-autoprovisioned/4-smoke-test | test step completed 4-smoke-test
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | es-streaming-autoprovisioned events from ns kuttl-test-full-newt:
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:17 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5b86946 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv replicaset-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv Binding Scheduled Successfully assigned kuttl-test-full-newt/elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:17 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5b86946 to 1 deployment-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv AddedInterface Add eth0 [10.129.2.18/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:18 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:28 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:33 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfullnewtautoprovisioned-1-67d5bskgnv.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:46 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:46 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:46 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:46 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-full-newt/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:51 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-full-newt/auto-provisioned-zookeeper-0 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:51 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-d745f1e2-2af2-46df-bd73-427809055712 ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:53 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d745f1e2-2af2-46df-bd73-427809055712" attachdetach-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.131.0.23/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:57:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:04 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" in 7.914s (7.914s including waiting) kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:04 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:04 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:28 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:28 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:28 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:28 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-full-newt/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:33 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-full-newt/auto-provisioned-kafka-0 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:33 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-e974f876-4aad-40b4-ac38-2c41466aa04b ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:35 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-e974f876-4aad-40b4-ac38-2c41466aa04b" attachdetach-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:38 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.131.0.24/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:38 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:38 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:38 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:59 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd Binding Scheduled Successfully assigned kuttl-test-full-newt/auto-provisioned-entity-operator-74ff7ff4f9-6rkhd to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:59 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-74ff7ff4f9 SuccessfulCreate Created pod: auto-provisioned-entity-operator-74ff7ff4f9-6rkhd replicaset-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:58:59 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-74ff7ff4f9 to 1 deployment-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:00 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd AddedInterface Add eth0 [10.128.2.27/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:00 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:f1be1aa2f18276f9169893eb55e3733cd52fa38f2101a9b3925f79774841689f" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:00 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:00 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:00 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:f1be1aa2f18276f9169893eb55e3733cd52fa38f2101a9b3925f79774841689f" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:00 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:00 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:00 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{tls-sidecar} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:10 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{tls-sidecar} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" in 9.646s (9.646s including waiting) kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:10 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:10 +0000 UTC Normal Pod auto-provisioned-entity-operator-74ff7ff4f9-6rkhd.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:32 +0000 UTC Normal Pod auto-provisioned-collector-799d9454cb-wp96r Binding Scheduled Successfully assigned kuttl-test-full-newt/auto-provisioned-collector-799d9454cb-wp96r to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:32 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-799d9454cb SuccessfulCreate Created pod: auto-provisioned-collector-799d9454cb-wp96r replicaset-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:32 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-799d9454cb to 1 deployment-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:32 +0000 UTC Normal Pod auto-provisioned-ingester-7b6f446856-ft2zc Binding Scheduled Successfully assigned kuttl-test-full-newt/auto-provisioned-ingester-7b6f446856-ft2zc to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:32 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-7b6f446856 SuccessfulCreate Created pod: auto-provisioned-ingester-7b6f446856-ft2zc replicaset-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:32 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-7b6f446856 to 1 deployment-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:32 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f Binding Scheduled Successfully assigned kuttl-test-full-newt/auto-provisioned-query-66585b8b47-lfd6f to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:32 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-66585b8b47 SuccessfulCreate Created pod: auto-provisioned-query-66585b8b47-lfd6f replicaset-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:32 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-66585b8b47 to 1 deployment-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-collector-799d9454cb-wp96r AddedInterface Add eth0 [10.131.0.25/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-collector-799d9454cb-wp96r.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-collector-799d9454cb-wp96r.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-collector-799d9454cb-wp96r.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-ingester-7b6f446856-ft2zc AddedInterface Add eth0 [10.131.0.26/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-ingester-7b6f446856-ft2zc.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:849018528225b7370cc4740fc9f94bef7ffd4195328a916a6013d88f885eebe2" kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:33 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:34 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:36 +0000 UTC Normal Pod auto-provisioned-ingester-7b6f446856-ft2zc.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:849018528225b7370cc4740fc9f94bef7ffd4195328a916a6013d88f885eebe2" in 2.695s (2.695s including waiting) kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:36 +0000 UTC Normal Pod auto-provisioned-ingester-7b6f446856-ft2zc.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:36 +0000 UTC Normal Pod auto-provisioned-ingester-7b6f446856-ft2zc.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:41 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:41 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:41 +0000 UTC Normal Pod auto-provisioned-query-66585b8b47-lfd6f.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:41 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-66585b8b47 SuccessfulDelete Deleted pod: auto-provisioned-query-66585b8b47-lfd6f replicaset-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:41 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled down replica set auto-provisioned-query-66585b8b47 to 0 from 1 deployment-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:43 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92 Binding Scheduled Successfully assigned kuttl-test-full-newt/auto-provisioned-query-cc78f8fb5-8nw92 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:43 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92 AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:43 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:43 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-cc78f8fb5 SuccessfulCreate Created pod: auto-provisioned-query-cc78f8fb5-8nw92 replicaset-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:43 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-cc78f8fb5 to 1 deployment-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:44 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:44 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:44 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:44 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:44 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:44 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:44 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:44 +0000 UTC Normal Pod auto-provisioned-query-cc78f8fb5-8nw92.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:48 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:48 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Normal Pod check-span-mvwrj Binding Scheduled Successfully assigned kuttl-test-full-newt/check-span-mvwrj to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Normal Pod check-span-mvwrj AddedInterface Add eth0 [10.131.0.28/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Normal Pod check-span-mvwrj.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-mvwrj job-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Normal Pod report-span-h5jvk Binding Scheduled Successfully assigned kuttl-test-full-newt/report-span-h5jvk to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Normal Pod report-span-h5jvk AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Normal Pod report-span-h5jvk.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:49 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-h5jvk job-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:54 +0000 UTC Normal Pod check-span-mvwrj.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" in 4.673s (4.673s including waiting) kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:54 +0000 UTC Normal Pod check-span-mvwrj.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:54 +0000 UTC Normal Pod check-span-mvwrj.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:54 +0000 UTC Normal Pod report-span-h5jvk.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" in 4.742s (4.742s including waiting) kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:54 +0000 UTC Normal Pod report-span-h5jvk.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 06:59:54 +0000 UTC Normal Pod report-span-h5jvk.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | 2023-12-11 07:00:04 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:00:05 | es-streaming-autoprovisioned | Deleting namespace: kuttl-test-full-newt
=== CONT  kuttl/harness/es-simple-prod
logger.go:42: 07:00:18 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:00:18 | es-simple-prod | Creating namespace: kuttl-test-coherent-snail
logger.go:42: 07:00:18 | es-simple-prod | es-simple-prod events from ns kuttl-test-coherent-snail:
logger.go:42: 07:00:18 | es-simple-prod | Deleting namespace: kuttl-test-coherent-snail
=== CONT  kuttl/harness/es-rollover-autoprov
logger.go:42: 07:00:24 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:00:24 | es-rollover-autoprov | Creating namespace: kuttl-test-leading-hound
logger.go:42: 07:00:24 | es-rollover-autoprov/1-install | starting test step 1-install
logger.go:42: 07:00:24 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-leading-hound/my-jaeger created
logger.go:42: 07:01:00 | es-rollover-autoprov/1-install | test step completed 1-install
logger.go:42: 07:01:00 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 07:01:00 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:01:02 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:01:08 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 07:01:09 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 07:01:09 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 07:01:33 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 07:01:33 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices
logger.go:42: 07:01:33 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-leading-hound/00-check-indices created
logger.go:42: 07:01:36 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices
logger.go:42: 07:01:36 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices
logger.go:42: 07:01:36 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-leading-hound/01-check-indices created
logger.go:42: 07:01:39 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices
logger.go:42: 07:01:39 | es-rollover-autoprov/5-install | starting test step 5-install
logger.go:42: 07:01:39 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-leading-hound/my-jaeger updated
logger.go:42: 07:01:53 | es-rollover-autoprov/5-install | test step completed 5-install
logger.go:42: 07:01:53 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans
logger.go:42: 07:01:53 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:02:02 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml]
logger.go:42: 07:02:12 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE]
logger.go:42: 07:02:12 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created
logger.go:42: 07:02:39 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans
logger.go:42: 07:02:39 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices
logger.go:42: 07:02:39 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-leading-hound/02-check-indices created
logger.go:42: 07:02:43 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices
logger.go:42: 07:02:43 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices
logger.go:42: 07:02:43 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-leading-hound/03-check-indices created
logger.go:42: 07:02:46 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices
logger.go:42: 07:02:46 | es-rollover-autoprov/9-check-indices | starting test step 9-check-indices
logger.go:42: 07:02:46 | es-rollover-autoprov/9-check-indices | Job:kuttl-test-leading-hound/04-check-indices created
logger.go:42: 07:02:49 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices
logger.go:42: 07:02:49 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans
logger.go:42: 07:02:49 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:02:57 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml]
logger.go:42: 07:02:58 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE]
logger.go:42: 07:02:58 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created
logger.go:42: 07:03:22 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans
logger.go:42: 07:03:22 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices
logger.go:42: 07:03:22 | es-rollover-autoprov/11-check-indices | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE]
logger.go:42: 07:06:13 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:13Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists"
logger.go:42: 07:06:13 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:13Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 07:06:13 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:13Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully"
logger.go:42: 07:06:13 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:13Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob"
logger.go:42: 07:06:13 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:13Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:06:13 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:13Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:06:24 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:24Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:06:34 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:34Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:06:44 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:44Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:06:54 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:06:54Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 07:07:04 | es-rollover-autoprov/11-check-indices | time="2023-12-11T07:07:04Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after my-jaeger-es-rollover 50.100973147s"
logger.go:42: 07:07:04 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-leading-hound/05-check-indices created
logger.go:42: 07:07:07 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices
logger.go:42: 07:07:07 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices
logger.go:42: 07:07:07 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-leading-hound/06-check-indices created
logger.go:42: 07:07:10 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices
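
Editor's aside on the quiet stretch between 07:03:22 and 07:06:13 in step 11-check-indices above: the wait-cronjob utility first confirms the my-jaeger-es-rollover CronJob exists, then blocks until its next scheduled run produces a successful Job, which is what the repeated "Waiting for next job ... to succeed" lines record. A hedged plain-kubectl approximation of that loop, not the utility's actual implementation (it assumes the standard convention that CronJob-created Jobs carry the CronJob name as a prefix):

    # Poll until a Job spawned by the rollover CronJob appears, then wait for
    # the most recent one to complete.
    until kubectl get jobs -n $NAMESPACE -o jsonpath='{.items[*].metadata.name}' \
        | tr ' ' '\n' | grep -q '^my-jaeger-es-rollover-'; do
      sleep 10
    done
    kubectl wait --for=condition=complete -n $NAMESPACE --timeout=300s \
      "job/$(kubectl get jobs -n $NAMESPACE -o jsonpath='{.items[*].metadata.name}' \
        | tr ' ' '\n' | grep '^my-jaeger-es-rollover-' | tail -n 1)"
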
logger.go:42: 07:07:10 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-leading-hound:
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:30 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7f9d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh replicaset-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh Binding Scheduled Successfully assigned kuttl-test-leading-hound/elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:30 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7f9d to 1 deployment-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh AddedInterface Add eth0 [10.129.2.19/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:41 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:46 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestleadinghoundmyjaeger-1-5674bd7fvzxbh.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
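
Note: the two Unhealthy warnings are expected during Elasticsearch startup: the readiness probe keeps polling the HTTP endpoint and reports code 000 (no response at all) until the node begins answering. The probe behaves roughly like this curl idiom (port and path illustrative):

    # 000 means the request never completed; any other value is the HTTP status code.
    curl -s -o /dev/null -w '%{http_code}\n' http://localhost:9200/
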
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:57 +0000 UTC Normal Pod my-jaeger-collector-7749bd94cd-dww5b Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-collector-7749bd94cd-dww5b to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:57 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7749bd94cd SuccessfulCreate Created pod: my-jaeger-collector-7749bd94cd-dww5b replicaset-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:57 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-7749bd94cd to 1 deployment-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:57 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5 Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-query-75845c5cb9-z5qh5 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:57 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-75845c5cb9 SuccessfulCreate Created pod: my-jaeger-query-75845c5cb9-z5qh5 replicaset-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:57 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-75845c5cb9 to 1 deployment-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-collector-7749bd94cd-dww5b AddedInterface Add eth0 [10.131.0.30/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-collector-7749bd94cd-dww5b.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-collector-7749bd94cd-dww5b.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-collector-7749bd94cd-dww5b.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5 AddedInterface Add eth0 [10.128.2.30/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:00:58 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:04 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:04 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:04 +0000 UTC Normal Pod my-jaeger-query-75845c5cb9-z5qh5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:04 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-75845c5cb9 SuccessfulDelete Deleted pod: my-jaeger-query-75845c5cb9-z5qh5 replicaset-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:04 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-75845c5cb9 to 0 from 1 deployment-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:05 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-query-786dfbb74d-htdgt to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:05 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes
"registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:05 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-786dfbb74d SuccessfulCreate Created pod: my-jaeger-query-786dfbb74d-htdgt replicaset-controller logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:05 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-786dfbb74d to 1 deployment-controller logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:06 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:06 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:06 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:06 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:06 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:06 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:06 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:06 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:09 +0000 UTC Normal Pod 00-report-span-7x7zf Binding Scheduled Successfully assigned kuttl-test-leading-hound/00-report-span-7x7zf to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:09 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-7x7zf job-controller logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:10 +0000 UTC Normal Pod 00-report-span-7x7zf AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:10 +0000 UTC Normal Pod 00-report-span-7x7zf.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:10 +0000 UTC Normal Pod 00-report-span-7x7zf.spec.containers{asserts-container} Created Created container asserts-container 
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:10 +0000 UTC Normal Pod 00-report-span-7x7zf.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:10 +0000 UTC Normal Pod 00-report-span-7x7zf.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:33 +0000 UTC Normal Pod 00-check-indices-5wpbm Binding Scheduled Successfully assigned kuttl-test-leading-hound/00-check-indices-5wpbm to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:33 +0000 UTC Normal Pod 00-check-indices-5wpbm AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:33 +0000 UTC Normal Pod 00-check-indices-5wpbm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:33 +0000 UTC Normal Pod 00-check-indices-5wpbm.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:33 +0000 UTC Normal Pod 00-check-indices-5wpbm.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:33 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-5wpbm job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:33 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:36 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:36 +0000 UTC Normal Pod 01-check-indices-pbnzk Binding Scheduled Successfully assigned kuttl-test-leading-hound/01-check-indices-pbnzk to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:36 +0000 UTC Normal Pod 01-check-indices-pbnzk AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes
"registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:36 +0000 UTC Normal Pod 01-check-indices-pbnzk.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:36 +0000 UTC Normal Pod 01-check-indices-pbnzk.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:36 +0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-pbnzk job-controller logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:39 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:40 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-xjwkh Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-rollover-create-mapping-xjwkh to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:40 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-xjwkh job-controller logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:41 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-xjwkh AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:41 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-xjwkh.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:42 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:42 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-7749bd94cd-dww5b horizontal-pod-autoscaler logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:42 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:44 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-xjwkh.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" in 3.756s (3.756s including waiting) kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:44 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-xjwkh.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created 
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:44 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-xjwkh.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" in 3.756s (3.756s including waiting) kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:44 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-xjwkh.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created container my-jaeger-es-rollover-create-mapping kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:44 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-xjwkh.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:48 +0000 UTC Normal Pod my-jaeger-collector-7749bd94cd-dww5b.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7749bd94cd SuccessfulDelete Deleted pod: my-jaeger-collector-7749bd94cd-dww5b replicaset-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:48 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-7749bd94cd to 0 from 1 deployment-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:48 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:48 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:48 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:48 +0000 UTC Normal Pod my-jaeger-query-786dfbb74d-htdgt.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-786dfbb74d SuccessfulDelete Deleted pod: my-jaeger-query-786dfbb74d-htdgt replicaset-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:48 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-786dfbb74d to 0 from 1 deployment-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-collector-7898d974b9-h8jk9 Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-collector-7898d974b9-h8jk9 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-collector-7898d974b9-h8jk9 AddedInterface Add eth0 [10.128.2.32/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-collector-7898d974b9-h8jk9.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7898d974b9 SuccessfulCreate Created pod: my-jaeger-collector-7898d974b9-h8jk9 replicaset-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-7898d974b9 to 1 deployment-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7 Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-query-69c4cdcf45-kdrz7 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7 AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-69c4cdcf45 SuccessfulCreate Created pod: my-jaeger-query-69c4cdcf45-kdrz7 replicaset-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:49 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-69c4cdcf45 to 1 deployment-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:50 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:50 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:50 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:50 +0000 UTC Normal Pod my-jaeger-query-69c4cdcf45-kdrz7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:51 +0000 UTC Normal Pod my-jaeger-collector-7898d974b9-h8jk9.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" in 1.934s (1.934s including waiting) kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:51 +0000 UTC Normal Pod my-jaeger-collector-7898d974b9-h8jk9.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:01:51 +0000 UTC Normal Pod my-jaeger-collector-7898d974b9-h8jk9.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371302-j7ks7 Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-lookback-28371302-j7ks7 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371302-j7ks7 AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371302-j7ks7.spec.containers{my-jaeger-es-lookback} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371302 SuccessfulCreate Created pod: my-jaeger-es-lookback-28371302-j7ks7 job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28371302 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371302-pc5cm Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-rollover-28371302-pc5cm to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371302-pc5cm AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371302-pc5cm.spec.containers{my-jaeger-es-rollover} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371302 SuccessfulCreate Created pod: my-jaeger-es-rollover-28371302-pc5cm job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28371302 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:01 +0000 UTC Normal Pod my-jaeger-es-lookback-28371302-j7ks7.spec.containers{my-jaeger-es-lookback} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" in 1.19s (1.19s including waiting) kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:01 +0000 UTC Normal Pod my-jaeger-es-lookback-28371302-j7ks7.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:01 +0000 UTC Normal Pod my-jaeger-es-lookback-28371302-j7ks7.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:01 +0000 UTC Normal Pod my-jaeger-es-rollover-28371302-pc5cm.spec.containers{my-jaeger-es-rollover} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" in 1.183s (1.183s including waiting) kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:01 +0000 UTC Normal Pod my-jaeger-es-rollover-28371302-pc5cm.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:01 +0000 UTC Normal Pod my-jaeger-es-rollover-28371302-pc5cm.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:04 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371302 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:04 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28371302, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:04 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371302 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:04 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28371302, status: Complete cronjob-controller
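
Note: this first completed pair shows the split of duties between the two CronJobs the operator provisions: my-jaeger-es-rollover asks Elasticsearch to roll the write alias over to a fresh index, while my-jaeger-es-lookback rewinds the read alias so queries only cover the configured lookback window. The rollover half corresponds roughly to this Elasticsearch API call (endpoint and condition illustrative; the actual jobs derive their conditions from flags):

    curl -s -XPOST "http://elasticsearch:9200/jaeger-span-write/_rollover" \
      -H 'Content-Type: application/json' -d '{"conditions": {"max_age": "2d"}}'
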
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:12 +0000 UTC Normal Pod 02-report-span-cn8hb Binding Scheduled Successfully assigned kuttl-test-leading-hound/02-report-span-cn8hb to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:12 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-cn8hb job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:13 +0000 UTC Normal Pod 02-report-span-cn8hb AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:13 +0000 UTC Normal Pod 02-report-span-cn8hb.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:15 +0000 UTC Normal Pod 02-report-span-cn8hb.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" in 2.174s (2.174s including waiting) kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:15 +0000 UTC Normal Pod 02-report-span-cn8hb.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:15 +0000 UTC Normal Pod 02-report-span-cn8hb.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:38 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:39 +0000 UTC Normal Pod 02-check-indices-hk9tx Binding Scheduled Successfully assigned kuttl-test-leading-hound/02-check-indices-hk9tx to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:39 +0000 UTC Normal Pod 02-check-indices-hk9tx AddedInterface Add eth0 [10.128.2.36/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:39 +0000 UTC Normal Pod 02-check-indices-hk9tx.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:39 +0000 UTC Normal Pod 02-check-indices-hk9tx.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:39 +0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-hk9tx job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:40 +0000 UTC Normal Pod 02-check-indices-hk9tx.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:42 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:42 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-7898d974b9-h8jk9 horizontal-pod-autoscaler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:43 +0000 UTC Normal Pod 03-check-indices-6mhwc Binding Scheduled Successfully assigned kuttl-test-leading-hound/03-check-indices-6mhwc to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:43 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-6mhwc job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:44 +0000 UTC Normal Pod 03-check-indices-6mhwc AddedInterface Add eth0 [10.128.2.37/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:44 +0000 UTC Normal Pod 03-check-indices-6mhwc.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:44 +0000 UTC Normal Pod 03-check-indices-6mhwc.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:44 +0000 UTC Normal Pod 03-check-indices-6mhwc.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:46 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:46 +0000 UTC Normal Pod 04-check-indices-cvpb7 Binding Scheduled Successfully assigned kuttl-test-leading-hound/04-check-indices-cvpb7 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:46 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-cvpb7 job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:47 +0000 UTC Normal Pod 04-check-indices-cvpb7 AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:47 +0000 UTC Normal Pod 04-check-indices-cvpb7.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:47 +0000 UTC Normal Pod 04-check-indices-cvpb7.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:47 +0000 UTC Normal Pod 04-check-indices-cvpb7.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:49 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:58 +0000 UTC Normal Pod 03-report-span-nqxzg Binding Scheduled Successfully assigned kuttl-test-leading-hound/03-report-span-nqxzg to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:58 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-nqxzg job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:59 +0000 UTC Normal Pod 03-report-span-nqxzg AddedInterface Add eth0 [10.128.2.39/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:59 +0000 UTC Normal Pod 03-report-span-nqxzg.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:59 +0000 UTC Normal Pod 03-report-span-nqxzg.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:02:59 +0000 UTC Normal Pod 03-report-span-nqxzg.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371303-wth4s Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-lookback-28371303-wth4s to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371303-wth4s AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371303-wth4s.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371303-wth4s.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371303-wth4s.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371303 SuccessfulCreate Created pod: my-jaeger-es-lookback-28371303-wth4s job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28371303 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371303-rwhtw Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-rollover-28371303-rwhtw to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371303-rwhtw AddedInterface Add eth0 [10.131.0.36/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371303-rwhtw.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371303-rwhtw.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371303-rwhtw.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371303 SuccessfulCreate Created pod: my-jaeger-es-rollover-28371303-rwhtw job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28371303 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371303 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28371303, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371303 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28371303, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:03:21 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371304-r2n2p Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-lookback-28371304-r2n2p to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371304-r2n2p AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371304-r2n2p.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371304-r2n2p.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371304-r2n2p.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371304 SuccessfulCreate Created pod: my-jaeger-es-lookback-28371304-r2n2p job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28371304 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371304-5x7l9 Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-rollover-28371304-5x7l9 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371304-5x7l9 AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371304-5x7l9.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371304-5x7l9.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371304-5x7l9.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371304 SuccessfulCreate Created pod: my-jaeger-es-rollover-28371304-5x7l9 job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28371304 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:02 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371304 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:02 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28371304, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371304 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:04:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28371304, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371305-t5lnz Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-lookback-28371305-t5lnz to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371305-t5lnz AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371305-t5lnz.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371305-t5lnz.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371305-t5lnz.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371305 SuccessfulCreate Created pod: my-jaeger-es-lookback-28371305-t5lnz job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28371305 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371305-dw9g5 Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-rollover-28371305-dw9g5 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371305-dw9g5 AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371305-dw9g5.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371305-dw9g5.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371305-dw9g5.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371305 SuccessfulCreate Created pod: my-jaeger-es-rollover-28371305-dw9g5 job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28371305 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371305 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulDelete Deleted job my-jaeger-es-lookback-28371302 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28371305, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371305 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulDelete Deleted job my-jaeger-es-rollover-28371302 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:05:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28371305, status: Complete cronjob-controller
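
Note: from here on, each SawCompletedJob event is paired with a SuccessfulDelete of the job from three runs earlier; that is the CronJob controller pruning job history, consistent with the default successfulJobsHistoryLimit of 3. It can be confirmed with:

    kubectl get cronjob my-jaeger-es-rollover -n "$NAMESPACE" \
      -o jsonpath='{.spec.successfulJobsHistoryLimit}{"\n"}'
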
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371306-zhlrh Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-lookback-28371306-zhlrh to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371306-zhlrh AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371306-zhlrh.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371306-zhlrh.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371306-zhlrh.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371306 SuccessfulCreate Created pod: my-jaeger-es-lookback-28371306-zhlrh job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28371306 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371306-qkbtb Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-rollover-28371306-qkbtb to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371306-qkbtb AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371306-qkbtb.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371306-qkbtb.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371306-qkbtb.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371306 SuccessfulCreate Created pod: my-jaeger-es-rollover-28371306-qkbtb job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28371306 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371306 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulDelete Deleted job my-jaeger-es-lookback-28371303 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28371306, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371306 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulDelete Deleted job my-jaeger-es-rollover-28371303 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:06:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28371306, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371307-p7kfm Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-lookback-28371307-p7kfm to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371307-p7kfm AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371307-p7kfm.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371307-p7kfm.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28371307-p7kfm.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371307 SuccessfulCreate Created pod: my-jaeger-es-lookback-28371307-p7kfm job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28371307 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371307-64lmt Binding Scheduled Successfully assigned kuttl-test-leading-hound/my-jaeger-es-rollover-28371307-64lmt to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371307-64lmt AddedInterface Add eth0 [10.128.2.46/23] from ovn-kubernetes
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371307-64lmt.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371307-64lmt.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28371307-64lmt.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371307 SuccessfulCreate Created pod: my-jaeger-es-rollover-28371307-64lmt job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28371307 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:02 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28371307 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:02 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulDelete Deleted job my-jaeger-es-lookback-28371304 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:02 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28371307, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28371307 Completed Job completed job-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulDelete Deleted job my-jaeger-es-rollover-28371304 cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28371307, status: Complete cronjob-controller
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:04 +0000 UTC Normal Pod 05-check-indices-dph9m Binding Scheduled Successfully assigned kuttl-test-leading-hound/05-check-indices-dph9m to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:04 +0000 UTC Normal Pod 05-check-indices-dph9m AddedInterface Add eth0 [10.128.2.47/23] from ovn-kubernetes
05-check-indices-dph9m.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:04 +0000 UTC Normal Pod 05-check-indices-dph9m.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:04 +0000 UTC Normal Pod 05-check-indices-dph9m.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:04 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-dph9m job-controller logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:07 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:07 +0000 UTC Normal Pod 06-check-indices-dc4zf Binding Scheduled Successfully assigned kuttl-test-leading-hound/06-check-indices-dc4zf to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:07 +0000 UTC Normal Pod 06-check-indices-dc4zf AddedInterface Add eth0 [10.128.2.48/23] from ovn-kubernetes logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:07 +0000 UTC Normal Pod 06-check-indices-dc4zf.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:07 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-dc4zf job-controller logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:08 +0000 UTC Normal Pod 06-check-indices-dc4zf.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:08 +0000 UTC Normal Pod 06-check-indices-dc4zf.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:07:10 | es-rollover-autoprov | 2023-12-11 07:07:10 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller logger.go:42: 07:07:10 | es-rollover-autoprov | Deleting namespace: kuttl-test-leading-hound === CONT kuttl/harness/es-increasing-replicas logger.go:42: 07:07:18 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:07:18 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:07:18 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:07:18 | es-increasing-replicas | Creating namespace: kuttl-test-hip-pony logger.go:42: 07:07:18 | es-increasing-replicas/1-install | starting test step 1-install logger.go:42: 07:07:18 | es-increasing-replicas/1-install | Jaeger:kuttl-test-hip-pony/simple-prod created logger.go:42: 07:08:11 | es-increasing-replicas/1-install | test step completed 1-install logger.go:42: 07:08:11 | es-increasing-replicas/2-install | starting test step 2-install logger.go:42: 
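The "Ignoring ..." lines above show kuttl's step-file filter at work: only files whose names match the logged regexp (a leading step number, a dash, a name with no further dots, and an optional .yaml suffix) are treated as test steps, so helper scripts and templates in the same directory are skipped. A minimal Go sketch of that match follows; the regexp is copied verbatim from the log, while the file list mixes the names kuttl ignored above with one hypothetical step file for contrast.

package main

import (
	"fmt"
	"regexp"
)

// Step-file pattern taken verbatim from the kuttl log output above.
var stepFile = regexp.MustCompile(`^(\d+)-(?:[^\.]+)(?:\.yaml)?$`)

func main() {
	for _, name := range []string{
		"README.md",                              // ignored in the log: no leading step number
		"check-es-nodes.sh",                      // ignored in the log: no leading step number
		"openshift-check-es-nodes.yaml.template", // ignored in the log: extra suffix after .yaml
		"1-install.yaml",                         // hypothetical step file that would match
	} {
		fmt.Printf("%-40s match=%v\n", name, stepFile.MatchString(name))
	}
}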
logger.go:42: 07:08:11 | es-increasing-replicas/2-install | Jaeger:kuttl-test-hip-pony/simple-prod updated
logger.go:42: 07:08:34 | es-increasing-replicas/2-install | test step completed 2-install
logger.go:42: 07:08:34 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test
logger.go:42: 07:08:34 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 07:09:00 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:09:15 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:09:15 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:09:16 | es-increasing-replicas/3-smoke-test | job.batch/report-span created
logger.go:42: 07:09:16 | es-increasing-replicas/3-smoke-test | job.batch/check-span created
logger.go:42: 07:09:27 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test
logger.go:42: 07:09:27 | es-increasing-replicas/4-install | starting test step 4-install
logger.go:42: 07:09:27 | es-increasing-replicas/4-install | Jaeger:kuttl-test-hip-pony/simple-prod updated
logger.go:42: 07:09:27 | es-increasing-replicas/4-install | test step completed 4-install
logger.go:42: 07:09:27 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes
logger.go:42: 07:09:27 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE]
logger.go:42: 07:09:28 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 07:09:28 | es-increasing-replicas/5-check-es-nodes | false
logger.go:42: 07:09:28 | es-increasing-replicas/5-check-es-nodes | Error: no matches found
logger.go:42: 07:09:33 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 07:09:33 | es-increasing-replicas/5-check-es-nodes | true
logger.go:42: 07:09:33 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes
logger.go:42: 07:09:33 | es-increasing-replicas | es-increasing-replicas events from ns kuttl-test-hip-pony:
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p Binding Scheduled Successfully assigned kuttl-test-hip-pony/elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p AddedInterface Add eth0 [10.129.2.20/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:41 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesthipponysimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948 to 1 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:51 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:07:56 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesthipponysimpleprod-1-67fb6d5948-c7r8p.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-collector-69487b48c-ltvg2 Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-collector-69487b48c-ltvg2 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-collector-69487b48c-ltvg2 AddedInterface Add eth0 [10.128.2.49/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-collector-69487b48c-ltvg2.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-collector-69487b48c-ltvg2.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-collector-69487b48c-ltvg2.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-69487b48c SuccessfulCreate Created pod: simple-prod-collector-69487b48c-ltvg2 replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-69487b48c to 1 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985 Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-query-7b6f6d9ddc-j5985 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985 AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b6f6d9ddc SuccessfulCreate Created pod: simple-prod-query-7b6f6d9ddc-j5985 replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:08 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7b6f6d9ddc to 1 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:09 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:09 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:09 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:09 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:09 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal Pod simple-prod-collector-69487b48c-cgm4b Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-collector-69487b48c-cgm4b to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal Pod simple-prod-collector-69487b48c-cgm4b AddedInterface Add eth0 [10.129.2.21/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal Pod simple-prod-collector-69487b48c-cgm4b.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-69487b48c SuccessfulCreate Created pod: simple-prod-collector-69487b48c-cgm4b replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-69487b48c to 2 from 1 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-query-7b6f6d9ddc-svskj to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj AddedInterface Add eth0 [10.129.2.22/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b6f6d9ddc SuccessfulCreate Created pod: simple-prod-query-7b6f6d9ddc-svskj replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:15 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7b6f6d9ddc to 2 from 1 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:23 +0000 UTC Normal Pod simple-prod-collector-69487b48c-cgm4b.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" in 7.863s (7.863s including waiting) kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:23 +0000 UTC Normal Pod simple-prod-collector-69487b48c-cgm4b.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:23 +0000 UTC Normal Pod simple-prod-collector-69487b48c-cgm4b.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:23 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" in 7.852s (7.852s including waiting) kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:23 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:23 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:23 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:30 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" in 6.847s (6.847s including waiting) kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:30 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:30 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:30 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:32 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" in 2.355s (2.355s including waiting) kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:32 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:08:32 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-j5985.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Normal Pod simple-prod-query-7b6f6d9ddc-svskj.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b6f6d9ddc SuccessfulDelete Deleted pod: simple-prod-query-7b6f6d9ddc-j5985 replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b6f6d9ddc SuccessfulDelete Deleted pod: simple-prod-query-7b6f6d9ddc-svskj replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-7b6f6d9ddc to 0 from 2 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:01 +0000 UTC Warning Endpoints simple-prod-query FailedToUpdateEndpoint Failed to update endpoint kuttl-test-hip-pony/simple-prod-query: Operation cannot be fulfilled on endpoints "simple-prod-query": the object has been modified; please apply your changes to the latest version and try again endpoint-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8 Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-query-79db4df8df-gpqv8 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8 AddedInterface Add eth0 [10.131.0.40/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-query-79db4df8df-t5lnl to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl AddedInterface Add eth0 [10.129.2.23/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal ReplicaSet.apps simple-prod-query-79db4df8df SuccessfulCreate Created pod: simple-prod-query-79db4df8df-gpqv8 replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal ReplicaSet.apps simple-prod-query-79db4df8df SuccessfulCreate Created pod: simple-prod-query-79db4df8df-t5lnl replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:02 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-79db4df8df to 2 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:03 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:16 +0000 UTC Normal Pod check-span-2mwdw Binding Scheduled Successfully assigned kuttl-test-hip-pony/check-span-2mwdw to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:16 +0000 UTC Normal Pod check-span-2mwdw AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:16 +0000 UTC Normal Pod check-span-2mwdw.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:16 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-2mwdw job-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:16 +0000 UTC Normal Pod report-span-ddjd7 Binding Scheduled Successfully assigned kuttl-test-hip-pony/report-span-ddjd7 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:16 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-ddjd7 job-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:17 +0000 UTC Normal Pod check-span-2mwdw.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:17 +0000 UTC Normal Pod check-span-2mwdw.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:17 +0000 UTC Normal Pod report-span-ddjd7 AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:17 +0000 UTC Normal Pod report-span-ddjd7.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:17 +0000 UTC Normal Pod report-span-ddjd7.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:17 +0000 UTC Normal Pod report-span-ddjd7.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:27 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc Binding Scheduled Successfully assigned kuttl-test-hip-pony/elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d-n87dc replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesthipponysimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesthipponysimpleprod-2-64f99d4c6d to 1 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-collector-584747f88-7m2v4 Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-collector-584747f88-7m2v4 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-collector-584747f88-x4cfn Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-collector-584747f88-x4cfn to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-584747f88 SuccessfulCreate Created pod: simple-prod-collector-584747f88-7m2v4 replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-584747f88 SuccessfulCreate Created pod: simple-prod-collector-584747f88-x4cfn replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-collector-69487b48c-cgm4b.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-collector-69487b48c-ltvg2.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-69487b48c SuccessfulDelete Deleted pod: simple-prod-collector-69487b48c-ltvg2 replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-69487b48c SuccessfulDelete Deleted pod: simple-prod-collector-69487b48c-cgm4b replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-69487b48c to 0 from 2 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-584747f88 to 2 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-query-79db4df8df-gpqv8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Pod simple-prod-query-79db4df8df-t5lnl.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal ReplicaSet.apps simple-prod-query-79db4df8df SuccessfulDelete Deleted pod: simple-prod-query-79db4df8df-gpqv8 replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal ReplicaSet.apps simple-prod-query-79db4df8df SuccessfulDelete Deleted pod: simple-prod-query-79db4df8df-t5lnl replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:29 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-79db4df8df to 0 from 2 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-collector-584747f88-7m2v4 AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-collector-584747f88-7m2v4.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-collector-584747f88-7m2v4.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-collector-584747f88-7m2v4.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-collector-584747f88-x4cfn AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-collector-584747f88-x4cfn.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-collector-584747f88-x4cfn.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-collector-584747f88-x4cfn.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-query-6fc86c86b8-nflkc to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc AddedInterface Add eth0 [10.129.2.25/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn Binding Scheduled Successfully assigned kuttl-test-hip-pony/simple-prod-query-6fc86c86b8-s44vn to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-s44vn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6fc86c86b8 SuccessfulCreate Created pod: simple-prod-query-6fc86c86b8-s44vn replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6fc86c86b8 SuccessfulCreate Created pod: simple-prod-query-6fc86c86b8-nflkc replicaset-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:30 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6fc86c86b8 to 2 deployment-controller
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:31 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | 2023-12-11 07:09:31 +0000 UTC Normal Pod simple-prod-query-6fc86c86b8-nflkc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:09:33 | es-increasing-replicas | Deleting namespace: kuttl-test-hip-pony
=== CONT kuttl/harness/es-index-cleaner-autoprov
logger.go:42: 07:10:24 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:10:25 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-pleased-flea
logger.go:42: 07:10:25 | es-index-cleaner-autoprov/1-install | starting test step 1-install
logger.go:42: 07:10:25 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-pleased-flea/test-es-index-cleaner-with-prefix created
logger.go:42: 07:11:01 | es-index-cleaner-autoprov/1-install | test step completed 1-install
logger.go:42: 07:11:01 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 07:11:01 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE test-es-index-cleaner-with-prefix /dev/null]
logger.go:42: 07:11:03 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:11:10 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 07:11:11 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 07:12:14 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 07:12:54 | es-index-cleaner-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 07:12:54 | es-index-cleaner-autoprov/3-install | starting test step 3-install
logger.go:42: 07:12:54 | es-index-cleaner-autoprov/3-install | Jaeger:kuttl-test-pleased-flea/test-es-index-cleaner-with-prefix updated
logger.go:42: 07:12:54 | es-index-cleaner-autoprov/3-install | test step completed 3-install
logger.go:42: 07:12:54 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | starting test step 4-wait-es-index-cleaner
logger.go:42: 07:12:54 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob test-es-index-cleaner-with-prefix-es-index-cleaner --namespace $NAMESPACE]
logger.go:42: 07:13:51 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-11T07:13:51Z" level=debug msg="Checking if the test-es-index-cleaner-with-prefix-es-index-cleaner CronJob exists"
logger.go:42: 07:13:51 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-11T07:13:51Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 07:13:51 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-11T07:13:51Z" level=info msg="Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner found successfully"
logger.go:42: 07:13:51 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-11T07:13:51Z" level=debug msg="Waiting for the next scheduled job from test-es-index-cleaner-with-prefix-es-index-cleaner cronjob"
logger.go:42: 07:13:51 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-11T07:13:51Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 07:13:51 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-11T07:13:51Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 07:14:01 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-11T07:14:01Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 07:14:11 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-11T07:14:11Z" level=info msg="Job of owner test-es-index-cleaner-with-prefix-es-index-cleaner succeeded after test-es-index-cleaner-with-prefix-es-index-cleaner 20.056002865s"
logger.go:42: 07:14:11 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | test step completed 4-wait-es-index-cleaner
logger.go:42: 07:14:11 | es-index-cleaner-autoprov/5-install | starting test step 5-install
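The debug trail from cmd-utils/wait-cronjob/main.go above shows its logic: verify the CronJob exists (falling back from batch/v1beta1 to batch/v1, hence the "No BatchV1beta1/Cronjobs were found" line), then block until a Job owned by that CronJob succeeds. A condensed client-go sketch of that loop follows; it is inferred from the log messages rather than taken from the tool's source, and the --cronjob/--namespace flags are simplified to plain variables.

package main

import (
	"context"
	"fmt"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	namespace := "kuttl-test-pleased-flea"                          // --namespace in the real tool
	cronjob := "test-es-index-cleaner-with-prefix-es-index-cleaner" // --cronjob in the real tool
	ctx := context.Background()

	// "Checking if the ... CronJob exists" (batch/v1 API).
	if _, err := cs.BatchV1().CronJobs(namespace).Get(ctx, cronjob, metav1.GetOptions{}); err != nil {
		panic(err)
	}

	// "Waiting for next job from ... to succeed": poll the Jobs in the
	// namespace and look for one owned by the CronJob with Succeeded > 0.
	for {
		jobs, err := cs.BatchV1().Jobs(namespace).List(ctx, metav1.ListOptions{})
		if err != nil {
			panic(err)
		}
		for _, job := range jobs.Items {
			for _, owner := range job.OwnerReferences {
				if owner.Kind == "CronJob" && owner.Name == cronjob && job.Status.Succeeded > 0 {
					fmt.Printf("job %s of owner %s succeeded\n", job.Name, cronjob)
					return
				}
			}
		}
		time.Sleep(10 * time.Second)
	}
}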
logger.go:42: 07:14:12 | es-index-cleaner-autoprov/5-install | Jaeger:kuttl-test-pleased-flea/test-es-index-cleaner-with-prefix updated
logger.go:42: 07:14:12 | es-index-cleaner-autoprov/5-install | test step completed 5-install
logger.go:42: 07:14:12 | es-index-cleaner-autoprov/6-check-indices | starting test step 6-check-indices
logger.go:42: 07:14:12 | es-index-cleaner-autoprov/6-check-indices | Job:kuttl-test-pleased-flea/00-check-indices created
logger.go:42: 07:14:15 | es-index-cleaner-autoprov/6-check-indices | test step completed 6-check-indices
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-pleased-flea:
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts Binding Scheduled Successfully assigned kuttl-test-pleased-flea/elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:31 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:31 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-7dbdd4c666 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts replicaset-controller
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:31 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-7dbdd4c666 to 1 deployment-controller
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:42 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:47 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpleasedfleatestesindexclean-1-752dts.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-64f5c9bffd-g8xh2 Binding Scheduled Successfully assigned kuttl-test-pleased-flea/test-es-index-cleaner-with-prefix-collector-64f5c9bffd-g8xh2 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-64f5c9bffd-g8xh2 AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-64f5c9bffd-g8xh2.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-64f5c9bffd-g8xh2.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-64f5c9bffd-g8xh2.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-64f5c9bffd SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-64f5c9bffd-g8xh2 replicaset-controller
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-64f5c9bffd to 1 deployment-controller
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd Binding Scheduled Successfully assigned kuttl-test-pleased-flea/test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-689bdc66c6 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd replicaset-controller
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:58 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-689bdc66c6 to 1 deployment-controller
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:59 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:59 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:59 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:59 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:59 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:59 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:10:59 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:04 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled down replica set test-es-index-cleaner-with-prefix-query-689bdc66c6 to 0 from 1 deployment-controller
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:05 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:05 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:05 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:05 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-689bdc66c6 SuccessfulDelete Deleted pod: test-es-index-cleaner-with-prefix-query-689bdc66c6-98lwd replicaset-controller
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:05 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-754ccc465f to 1 deployment-controller
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp Binding Scheduled Successfully assigned kuttl-test-pleased-flea/test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal Pod
test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:06 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-754ccc465f SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-754ccc465f-vz6gp replicaset-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-64f5c9bffd-g8xh2 horizontal-pod-autoscaler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:11:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:12:14 +0000 UTC Normal Pod 00-report-span-s5crw Binding Scheduled Successfully assigned kuttl-test-pleased-flea/00-report-span-s5crw to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:12:14 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-s5crw job-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:12:15 +0000 UTC Normal Pod 00-report-span-s5crw AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:12:15 +0000 UTC Normal Pod 00-report-span-s5crw.spec.containers{asserts-container} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:12:15 +0000 UTC Normal Pod 00-report-span-s5crw.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:12:15 +0000 UTC Normal Pod 00-report-span-s5crw.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:12:53 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28371313 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-2837131t8vwx job-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131t8vwx Binding Scheduled Successfully assigned kuttl-test-pleased-flea/test-es-index-cleaner-with-prefix-es-index-cleaner-2837131t8vwx to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131t8vwx AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131t8vwx.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:e5bea9a64ae418869cfb556d70e0a586c21589aed8606b4ff5850780ff5bbbd6" kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-28371313 cronjob-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131t8vwx.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:e5bea9a64ae418869cfb556d70e0a586c21589aed8606b4ff5850780ff5bbbd6" in 1.571s (1.571s including waiting) kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131t8vwx.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:02 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131t8vwx.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:04 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28371313 Completed Job completed job-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:13:04 +0000 UTC Normal 
CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-28371313, status: Complete cronjob-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28371314 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-2837131dn26r job-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131dn26r Binding Scheduled Successfully assigned kuttl-test-pleased-flea/test-es-index-cleaner-with-prefix-es-index-cleaner-2837131dn26r to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131dn26r AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131dn26r.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:e5bea9a64ae418869cfb556d70e0a586c21589aed8606b4ff5850780ff5bbbd6" already present on machine kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131dn26r.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2837131dn26r.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-28371314 cronjob-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:02 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28371314 Completed Job completed job-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:02 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-28371314, status: Complete cronjob-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:12 +0000 UTC Normal Pod 00-check-indices-j9d6t Binding Scheduled Successfully assigned kuttl-test-pleased-flea/00-check-indices-j9d6t to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:12 +0000 UTC Normal Pod 00-check-indices-j9d6t AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:12 +0000 UTC Normal Pod 00-check-indices-j9d6t.spec.containers{asserts-container} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:12 +0000 UTC Normal Pod 00-check-indices-j9d6t.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:12 +0000 UTC Normal Pod 00-check-indices-j9d6t.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:12 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-j9d6t job-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | 2023-12-11 07:14:14 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 07:14:15 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-pleased-flea === CONT kuttl/harness/es-from-aio-to-production logger.go:42: 07:14:21 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:14:22 | es-from-aio-to-production | Creating namespace: kuttl-test-selected-sturgeon logger.go:42: 07:14:22 | es-from-aio-to-production/0-install | starting test step 0-install logger.go:42: 07:14:22 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-selected-sturgeon/my-jaeger created logger.go:42: 07:14:29 | es-from-aio-to-production/0-install | test step completed 0-install logger.go:42: 07:14:29 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:14:29 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:15:10 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
logger.go:42: 07:15:16 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:15:17 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:15:17 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created logger.go:42: 07:15:17 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created logger.go:42: 07:15:29 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:15:29 | es-from-aio-to-production/3-install | starting test step 3-install logger.go:42: 07:15:29 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-selected-sturgeon/my-jaeger updated logger.go:42: 07:16:03 | es-from-aio-to-production/3-install | test step completed 3-install logger.go:42: 07:16:03 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test logger.go:42: 07:16:03 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:16:23 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:16:24 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:16:24 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged logger.go:42: 07:16:24 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged logger.go:42: 07:16:24 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test logger.go:42: 07:16:24 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-selected-sturgeon: logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:25 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77 Binding Scheduled Successfully assigned kuttl-test-selected-sturgeon/my-jaeger-75f7588b97-4tx77 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-75f7588b97 SuccessfulCreate Created pod: my-jaeger-75f7588b97-4tx77 replicaset-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:25 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-75f7588b97 to 1 deployment-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:26 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77 AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:26 +0000 UTC Normal Pod 
my-jaeger-75f7588b97-4tx77.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:28 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" in 1.961s (1.961s including waiting) kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:28 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:28 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:28 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:28 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:14:28 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:11 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:11 +0000 UTC Normal Pod my-jaeger-75f7588b97-4tx77.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:11 +0000 UTC Normal ReplicaSet.apps my-jaeger-75f7588b97 SuccessfulDelete Deleted pod: my-jaeger-75f7588b97-4tx77 replicaset-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:11 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-75f7588b97 to 0 from 1 deployment-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:12 +0000 UTC Normal Pod my-jaeger-554758f78f-vbngb Binding Scheduled Successfully assigned kuttl-test-selected-sturgeon/my-jaeger-554758f78f-vbngb to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:12 +0000 UTC Normal Pod my-jaeger-554758f78f-vbngb AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:12 +0000 UTC Normal Pod my-jaeger-554758f78f-vbngb.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:12 +0000 UTC Normal Pod my-jaeger-554758f78f-vbngb.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:12 +0000 UTC Normal Pod 
my-jaeger-554758f78f-vbngb.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:12 +0000 UTC Normal Pod my-jaeger-554758f78f-vbngb.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:12 +0000 UTC Normal Pod my-jaeger-554758f78f-vbngb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:12 +0000 UTC Normal ReplicaSet.apps my-jaeger-554758f78f SuccessfulCreate Created pod: my-jaeger-554758f78f-vbngb replicaset-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:12 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-554758f78f to 1 deployment-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:13 +0000 UTC Normal Pod my-jaeger-554758f78f-vbngb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:17 +0000 UTC Normal Pod check-span-zscxf Binding Scheduled Successfully assigned kuttl-test-selected-sturgeon/check-span-zscxf to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-zscxf job-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:17 +0000 UTC Normal Pod report-span-2rtmg Binding Scheduled Successfully assigned kuttl-test-selected-sturgeon/report-span-2rtmg to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:17 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2rtmg job-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:18 +0000 UTC Normal Pod check-span-zscxf AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:18 +0000 UTC Normal Pod check-span-zscxf.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:18 +0000 UTC Normal Pod check-span-zscxf.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:18 +0000 UTC Normal Pod check-span-zscxf.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:18 +0000 UTC Normal Pod report-span-2rtmg AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:18 +0000 UTC Normal Pod report-span-2rtmg.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 
2023-12-11 07:15:18 +0000 UTC Normal Pod report-span-2rtmg.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:18 +0000 UTC Normal Pod report-span-2rtmg.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:34 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867b6895d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n replicaset-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n Binding Scheduled Successfully assigned kuttl-test-selected-sturgeon/elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n AddedInterface Add eth0 [10.128.2.55/23] from ovn-kubernetes logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:34 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867b6895d to 1 deployment-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:44 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to 
accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:50 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestselectedsturgeonmyjaeger-1-5867f287n.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:15:51 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Pod my-jaeger-554758f78f-vbngb.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Pod my-jaeger-554758f78f-vbngb.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Pod my-jaeger-collector-76c4658698-zb6xt Binding Scheduled Successfully assigned kuttl-test-selected-sturgeon/my-jaeger-collector-76c4658698-zb6xt to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Pod my-jaeger-collector-76c4658698-zb6xt AddedInterface Add eth0 [10.131.0.51/23] from ovn-kubernetes logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Pod my-jaeger-collector-76c4658698-zb6xt.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Pod my-jaeger-collector-76c4658698-zb6xt.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Pod my-jaeger-collector-76c4658698-zb6xt.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-76c4658698 SuccessfulCreate Created pod: my-jaeger-collector-76c4658698-zb6xt replicaset-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-76c4658698 to 1 deployment-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6 Binding Scheduled Successfully assigned kuttl-test-selected-sturgeon/my-jaeger-query-67899b895c-gw4q6 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-67899b895c SuccessfulCreate Created pod: my-jaeger-query-67899b895c-gw4q6 replicaset-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:01 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-67899b895c to 1 deployment-controller logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6 AddedInterface Add eth0 [10.131.0.52/23] from ovn-kubernetes logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod 
my-jaeger-query-67899b895c-gw4q6.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:02 +0000 UTC Normal Pod my-jaeger-query-67899b895c-gw4q6.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:16 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:16 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:16:24 | es-from-aio-to-production | 2023-12-11 07:16:16 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:16:24 | es-from-aio-to-production | Deleting namespace: kuttl-test-selected-sturgeon === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (1284.35s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.69s) --- PASS: kuttl/harness/es-multiinstance (113.11s) --- 
PASS: kuttl/harness/es-streaming-autoprovisioned (187.09s)
--- PASS: kuttl/harness/es-simple-prod (5.71s)
--- PASS: kuttl/harness/es-rollover-autoprov (413.72s)
--- PASS: kuttl/harness/es-increasing-replicas (169.89s)
--- PASS: kuttl/harness/es-index-cleaner-autoprov (253.47s)
--- PASS: kuttl/harness/es-from-aio-to-production (129.81s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml
time="2023-12-11T07:16:42Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-11T07:16:42Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-11T07:16:42Z" level=debug msg="normalizing test case names"
time="2023-12-11T07:16:42Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts"
time="2023-12-11T07:16:42Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance"
time="2023-12-11T07:16:42Z" level=debug msg="elasticsearch/es-streaming-autoprovisioned -> elasticsearch_es_streaming_autoprovisioned"
time="2023-12-11T07:16:42Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod"
time="2023-12-11T07:16:42Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov"
time="2023-12-11T07:16:42Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas"
time="2023-12-11T07:16:42Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov"
time="2023-12-11T07:16:42Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+--------------------------------------------+--------+
|                    NAME                    | RESULT |
+--------------------------------------------+--------+
| elasticsearch_artifacts                    | passed |
| elasticsearch_es_multiinstance             | passed |
| elasticsearch_es_streaming_autoprovisioned | passed |
| elasticsearch_es_simple_prod               | passed |
| elasticsearch_es_rollover_autoprov         | passed |
| elasticsearch_es_increasing_replicas       | passed |
| elasticsearch_es_index_cleaner_autoprov    | passed |
| elasticsearch_es_from_aio_to_production    | passed |
+--------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=examples
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
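The junitcli pass above rewrites kuttl's ./artifacts/kuttl-report.xml into a per-suite JUnit file under /logs/artifacts, renaming each case to <suite>_<test> (slashes become underscores), as its debug output shows. A quick way to sanity-check the emitted report, sketched on the assumption that xmllint is available on the CI host (nothing in this log confirms that):

  # The rewritten per-suite report should record zero failures.
  xmllint --xpath 'string(//testsuite/@failures)' /logs/artifacts/elasticsearch.xml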
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/examples.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-examples make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=3.6.0 \ SKIP_KAFKA=false \ VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \ ./tests/e2e/examples/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 30m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 30m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-as-daemonset + cd examples-agent-as-daemonset + example_name=agent-as-daemonset + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-as-daemonset 01 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-as-daemonset.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-as-daemonset 02 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset + '[' examples-agent-as-daemonset '!=' _build ']' + cd .. 
+ mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. + mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + '[' false = true ']' + start_test examples-auto-provision-kafka + '[' 1 -ne 1 ']' + test_name=examples-auto-provision-kafka + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-auto-provision-kafka' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-auto-provision-kafka\e[0m' Rendering files for test examples-auto-provision-kafka + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. 
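The trace keeps calling a get_jaeger_name helper that pulls the Jaeger CR name out of a (possibly multi-document) manifest. A minimal sketch of that lookup, reconstructed from the trace output rather than copied from the repo, assuming yq v4 (matching the "yq e" syntax above):

get_jaeger_name() {
    # Select the YAML document whose kind is Jaeger and print its metadata.name.
    local deployment_file=$1
    local jaeger_name
    jaeger_name=$(yq e '. | select(.kind == "Jaeger").metadata.name' "$deployment_file")
    # The trace guards against an empty name before echoing it back.
    if [ -z "$jaeger_name" ]; then
        echo "ERROR: no Jaeger resource found in $deployment_file" >&2
        return 1
    fi
    echo "$jaeger_name"
}

# Example from the trace: get_jaeger_name ./00-install.yaml  ->  my-jaeger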
+ mkdir -p examples-auto-provision-kafka + cd examples-auto-provision-kafka + example_name=auto-provision-kafka + render_install_kafka_operator 01 + '[' 1 -ne 1 ']' + test_step=01 + '[' true '!=' true ']' + render_install_example auto-provision-kafka 02 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/auto-provision-kafka.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + JAEGER_NAME=auto-provision-kafka + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=streaming ++ '[' streaming = production ']' ++ '[' streaming = streaming ']' ++ echo streaming ++ return 0 + jaeger_strategy=streaming + '[' streaming = DaemonSet ']' + '[' streaming = allInOne ']' + '[' streaming = production ']' + '[' streaming = streaming ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./02-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./02-install.yaml + mv ./02-assert.yaml ./05-assert.yaml + render_assert_kafka true auto-provision-kafka 02 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provision-kafka + test_step=02 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./02-assert.yaml ++ expr 02 + 1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./3-assert.yaml ++ expr 02 + 2 + CLUSTER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./04-assert.yaml + 
render_smoke_test_example auto-provision-kafka 06 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=06 + deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + jaeger_name=auto-provision-kafka + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test auto-provision-kafka true 06 + '[' 3 -ne 3 ']' + jaeger=auto-provision-kafka + is_secured=true + test_step=06 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + export JAEGER_NAME=auto-provision-kafka + JAEGER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./06-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-auto-provision-kafka + '[' examples-auto-provision-kafka '!=' _build ']' + cd .. 
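Every render_smoke_test call above follows the same pattern: derive the query and collector endpoints from the Jaeger name, export them for gomplate, render a smoke-test step plus its assert, then unset the variables so they cannot leak into the next test. A condensed sketch of the secured/OpenShift branch, using the values from the auto-provision-kafka step (template paths as printed in the trace; gomplate assumed on the PATH):

jaeger=auto-provision-kafka
test_step=06
export JAEGER_NAME="$jaeger"
# oauth-proxy fronts the query service on OpenShift, hence https on 443.
export JAEGER_QUERY_ENDPOINT="https://${jaeger}-query:443"
export JAEGER_COLLECTOR_ENDPOINT="http://${jaeger}-collector-headless:14268"
gomplate -f tests/templates/openshift/smoke-test.yaml.template -o "./${test_step}-smoke-test.yaml"
gomplate -f tests/templates/smoke-test-assert.yaml.template -o "./${test_step}-assert.yaml"
unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT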
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
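The pair of sed calls that render_install_example runs on every rendered file normalizes the storage endpoints: the stock example manifests point at services in the "default" namespace, and the tests run in a per-test kuttl namespace. The same two expressions as in the trace, standalone (GNU sed; "~" is used as the delimiter so the URL's slashes need no escaping):

install_file=./00-install.yaml
# Rewrite the Elasticsearch server-urls option to the in-namespace service name.
sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' "$install_file"
# Same normalization for the Cassandra host.
sed -i 's~cassandra.default.svc~cassandra~gi' "$install_file"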
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
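For the production and streaming examples, the trace trims the storage spec before the manifest is applied: it empties .spec.storage.options and pins a one-node, memory-limited Elasticsearch so the auto-provisioned cluster fits on a CI node. The same two in-place edits, standalone (yq v4):

# Overrides applied to production/streaming install files in the trace above.
yq e -i '.spec.storage.options={}' ./01-install.yaml
yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml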
+ mkdir -p examples-simple-prod-with-volumes + cd examples-simple-prod-with-volumes + example_name=simple-prod-with-volumes + render_install_example simple-prod-with-volumes 01 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod-with-volumes 02 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml + start_test examples-simplest + '[' 1 -ne 1 ']' + test_name=examples-simplest + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simplest' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simplest\e[0m' Rendering files for test examples-simplest + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes + '[' examples-simple-prod-with-volumes '!=' _build ']' + cd .. + mkdir -p examples-simplest + cd examples-simplest + example_name=simplest + render_install_example simplest 00 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 01 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger + '[' 1 -ne 1 ']' + test_name=examples-with-badger + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m' Rendering files for test examples-with-badger + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest + '[' examples-simplest '!=' _build ']' + cd .. + mkdir -p examples-with-badger + cd examples-with-badger + example_name=with-badger + render_install_example with-badger 00 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + JAEGER_NAME=with-badger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger 01 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + jaeger_name=with-badger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + export JAEGER_NAME=with-badger + JAEGER_NAME=with-badger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger-and-volume + '[' 1 -ne 1 ']' + test_name=examples-with-badger-and-volume + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger-and-volume' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m' Rendering files for test examples-with-badger-and-volume + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger + '[' examples-with-badger '!=' _build ']' + cd .. + mkdir -p examples-with-badger-and-volume + cd examples-with-badger-and-volume + example_name=with-badger-and-volume + render_install_example with-badger-and-volume 00 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + JAEGER_NAME=with-badger-and-volume + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger-and-volume 01 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + jaeger_name=with-badger-and-volume + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger-and-volume true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger-and-volume + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + export JAEGER_NAME=with-badger-and-volume + JAEGER_NAME=with-badger-and-volume + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-cassandra + '[' 1 -ne 1 ']' + test_name=examples-with-cassandra + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-cassandra' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m' Rendering files for test examples-with-cassandra + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume + '[' examples-with-badger-and-volume '!=' _build ']' + cd .. 
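The strategy lookup that decides which assert template gets rendered falls through three cases, visible in the traces above: .spec.strategy wins if it is production or streaming; otherwise .spec.agent.strategy is used if set (the DaemonSet case); otherwise the default is allInOne. A sketch of that fallback, reconstructed from the trace and assuming yq v4:

get_jaeger_strategy() {
    local deployment_file=$1
    local strategy
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
    if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
        echo "$strategy"; return 0
    fi
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
    if [ "$strategy" != null ]; then
        echo "$strategy"; return 0   # e.g. DaemonSet
    fi
    echo allInOne                    # default when neither field is set
}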
+ mkdir -p examples-with-cassandra + cd examples-with-cassandra + example_name=with-cassandra + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-cassandra 01 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + JAEGER_NAME=with-cassandra + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-cassandra 02 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + jaeger_name=with-cassandra + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-cassandra true 02 + '[' 3 -ne 3 ']' + jaeger=with-cassandra + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + export JAEGER_NAME=with-cassandra + JAEGER_NAME=with-cassandra + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-sampling + '[' 1 -ne 1 ']' + test_name=examples-with-sampling + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-sampling' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m' Rendering files for test examples-with-sampling + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra + '[' examples-with-cassandra '!=' _build ']' + cd .. + mkdir -p examples-with-sampling + cd examples-with-sampling + export example_name=with-sampling + example_name=with-sampling + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-sampling 01 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + JAEGER_NAME=with-sampling + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-sampling 02 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + jaeger_name=with-sampling + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-sampling true 02 + '[' 3 -ne 3 ']' + jaeger=with-sampling + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + export JAEGER_NAME=with-sampling + JAEGER_NAME=with-sampling + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + start_test examples-openshift-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-openshift-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-agent-as-daemonset\e[0m' Rendering files for test examples-openshift-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling + '[' examples-with-sampling '!=' _build ']' + cd .. 
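Both Cassandra-backed examples (with-cassandra and with-sampling) open with the same render_install_cassandra step, which materializes an install and an assert file from the shared templates. A sketch of that helper as it appears in the trace (paths relative to the repo root; gomplate assumed on the PATH):

render_install_cassandra() {
    local test_step=$1
    # Step that deploys Cassandra, plus the assert kuttl uses to wait for it.
    gomplate -f tests/templates/cassandra-install.yaml.template -o "./${test_step}-install.yaml"
    gomplate -f tests/templates/cassandra-assert.yaml.template -o "./${test_step}-assert.yaml"
}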
+ mkdir -p examples-openshift-agent-as-daemonset + cd examples-openshift-agent-as-daemonset + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml + render_install_vertx 03 + '[' 1 -ne 1 ']' + test_step=03 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./03-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].env=[{"name": "JAEGER_AGENT_HOST", "valueFrom": {"fieldRef": {"apiVersion": "v1", "fieldPath": "status.hostIP"}}}]' ./03-install.yaml + render_find_service agent-as-daemonset production order 00 04 + '[' 5 -ne 5 ']' + jaeger=agent-as-daemonset + deployment_strategy=production + service_name=order + job_number=00 + test_step=04 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' production '!=' allInOne ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template -o ./04-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + start_test examples-openshift-with-htpasswd + '[' 1 -ne 1 ']' + test_name=examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-with-htpasswd' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m' Rendering files for test examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-agent-as-daemonset + '[' examples-openshift-agent-as-daemonset '!=' _build ']' + cd .. 
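The daemonset example wires the Vert.x app to whichever agent pod runs on its own node by injecting the node IP through the Kubernetes downward API; the yq edit in the trace appends that env entry to the first container. The same edit, standalone (yq v4; status.hostIP resolves at runtime to the node IP, where the agent DaemonSet listens via hostPort):

# Point the instrumented app at the node-local Jaeger agent.
yq e -i '.spec.template.spec.containers[0].env=[{"name": "JAEGER_AGENT_HOST", "valueFrom": {"fieldRef": {"apiVersion": "v1", "fieldPath": "status.hostIP"}}}]' ./03-install.yaml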
+ mkdir -p examples-openshift-with-htpasswd + cd examples-openshift-with-htpasswd + export JAEGER_NAME=with-htpasswd + JAEGER_NAME=with-htpasswd + export JAEGER_USERNAME=awesomeuser + JAEGER_USERNAME=awesomeuser + export JAEGER_PASSWORD=awesomepassword + JAEGER_PASSWORD=awesomepassword + export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' + JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ base64 + SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg== + /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + INSECURE=true + JAEGER_USERNAME= + JAEGER_PASSWORD= + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml + JAEGER_USERNAME=wronguser + JAEGER_PASSWORD=wrongpassword + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running examples E2E tests' Running examples E2E tests + cd tests/e2e/examples/_build + set +e + KUBECONFIG=/tmp/kubeconfig-282053367 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 17 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/examples-agent-as-daemonset === PAUSE kuttl/harness/examples-agent-as-daemonset === RUN kuttl/harness/examples-agent-with-priority-class === PAUSE kuttl/harness/examples-agent-with-priority-class === RUN kuttl/harness/examples-all-in-one-with-options === PAUSE kuttl/harness/examples-all-in-one-with-options === RUN kuttl/harness/examples-auto-provision-kafka === PAUSE kuttl/harness/examples-auto-provision-kafka === RUN kuttl/harness/examples-business-application-injected-sidecar === PAUSE kuttl/harness/examples-business-application-injected-sidecar === RUN kuttl/harness/examples-collector-with-priority-class === PAUSE kuttl/harness/examples-collector-with-priority-class === RUN kuttl/harness/examples-openshift-agent-as-daemonset === PAUSE kuttl/harness/examples-openshift-agent-as-daemonset === RUN kuttl/harness/examples-openshift-with-htpasswd === PAUSE kuttl/harness/examples-openshift-with-htpasswd === RUN kuttl/harness/examples-service-types === PAUSE kuttl/harness/examples-service-types === RUN kuttl/harness/examples-simple-prod === PAUSE kuttl/harness/examples-simple-prod === RUN kuttl/harness/examples-simple-prod-with-volumes === PAUSE kuttl/harness/examples-simple-prod-with-volumes === RUN kuttl/harness/examples-simplest === PAUSE kuttl/harness/examples-simplest === RUN kuttl/harness/examples-with-badger === PAUSE kuttl/harness/examples-with-badger === RUN kuttl/harness/examples-with-badger-and-volume === PAUSE kuttl/harness/examples-with-badger-and-volume === RUN kuttl/harness/examples-with-cassandra === PAUSE kuttl/harness/examples-with-cassandra === RUN kuttl/harness/examples-with-sampling === PAUSE kuttl/harness/examples-with-sampling === CONT kuttl/harness/artifacts logger.go:42: 07:18:09 | artifacts | Creating namespace: kuttl-test-enabled-stingray logger.go:42: 07:18:09 | artifacts | artifacts events from ns kuttl-test-enabled-stingray: logger.go:42: 07:18:09 | artifacts | Deleting namespace: kuttl-test-enabled-stingray === CONT kuttl/harness/examples-service-types logger.go:42: 07:18:15 | examples-service-types | Creating namespace: kuttl-test-crack-gannet logger.go:42: 07:18:15 | examples-service-types/0-install | starting test step 0-install logger.go:42: 07:18:15 | examples-service-types/0-install | Jaeger:kuttl-test-crack-gannet/service-types created logger.go:42: 07:18:22 | examples-service-types/0-install | test step completed 0-install logger.go:42: 07:18:22 | examples-service-types/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:18:22 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null] logger.go:42: 07:18:24 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
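The kubectl warning logged during the smoke test is benign: the Jaeger CR was created imperatively, so it lacks the last-applied-configuration annotation, and kubectl apply patches one in automatically, exactly as the message says. Per the warning's own guidance, creating the resource declaratively avoids it; a hypothetical illustration (jaeger.yaml is a placeholder name, not a file from this run):

# Record last-applied-configuration at create time...
kubectl create --save-config -f jaeger.yaml -n "$NAMESPACE"
# ...or use apply for both the first and every subsequent write.
kubectl apply -f jaeger.yaml -n "$NAMESPACE"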
logger.go:42: 07:18:30 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:18:31 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:18:31 | examples-service-types/1-smoke-test | job.batch/report-span created logger.go:42: 07:18:31 | examples-service-types/1-smoke-test | job.batch/check-span created logger.go:42: 07:18:44 | examples-service-types/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:18:44 | examples-service-types/2- | starting test step 2- logger.go:42: 07:18:44 | examples-service-types/2- | test step completed 2- logger.go:42: 07:18:44 | examples-service-types | examples-service-types events from ns kuttl-test-crack-gannet: logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:18 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer service-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:19 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22 Binding Scheduled Successfully assigned kuttl-test-crack-gannet/service-types-5c6f578cf7-hrx22 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:19 +0000 UTC Warning Pod service-types-5c6f578cf7-hrx22 FailedMount MountVolume.SetUp failed for volume "service-types-collector-tls-config-volume" : secret "service-types-collector-headless-tls" not found kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:19 +0000 UTC Normal ReplicaSet.apps service-types-5c6f578cf7 SuccessfulCreate Created pod: service-types-5c6f578cf7-hrx22 replicaset-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:19 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-5c6f578cf7 to 1 deployment-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:20 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22 AddedInterface Add eth0 [10.129.2.29/23] from ovn-kubernetes logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:20 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:20 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:20 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:20 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet 
logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:20 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:20 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:22 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer service-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:22 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer service-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:24 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer service-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:26 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:26 +0000 UTC Normal Pod service-types-5c6f578cf7-hrx22.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:26 +0000 UTC Normal ReplicaSet.apps service-types-5c6f578cf7 SuccessfulDelete Deleted pod: service-types-5c6f578cf7-hrx22 replicaset-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:26 +0000 UTC Normal Pod service-types-7bcfd9c994-hdvtd Binding Scheduled Successfully assigned kuttl-test-crack-gannet/service-types-7bcfd9c994-hdvtd to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:26 +0000 UTC Normal ReplicaSet.apps service-types-7bcfd9c994 SuccessfulCreate Created pod: service-types-7bcfd9c994-hdvtd replicaset-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:26 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-5c6f578cf7 to 0 from 1 deployment-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:26 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-7bcfd9c994 to 1 deployment-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:27 +0000 UTC Normal Pod service-types-7bcfd9c994-hdvtd AddedInterface Add eth0 [10.129.2.30/23] from ovn-kubernetes logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:27 +0000 UTC Normal Pod service-types-7bcfd9c994-hdvtd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:27 +0000 UTC Normal Pod service-types-7bcfd9c994-hdvtd.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:27 +0000 UTC Normal Pod service-types-7bcfd9c994-hdvtd.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:27 +0000 UTC Normal Pod service-types-7bcfd9c994-hdvtd.spec.containers{oauth-proxy} Pulled Container image 
"registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:27 +0000 UTC Normal Pod service-types-7bcfd9c994-hdvtd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:27 +0000 UTC Normal Pod service-types-7bcfd9c994-hdvtd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:31 +0000 UTC Normal Pod check-span-kjhfh Binding Scheduled Successfully assigned kuttl-test-crack-gannet/check-span-kjhfh to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:31 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-kjhfh job-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:31 +0000 UTC Normal Pod report-span-jcspv Binding Scheduled Successfully assigned kuttl-test-crack-gannet/report-span-jcspv to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:31 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-jcspv job-controller logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:32 +0000 UTC Normal Pod check-span-kjhfh AddedInterface Add eth0 [10.128.2.56/23] from ovn-kubernetes logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:32 +0000 UTC Normal Pod check-span-kjhfh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:32 +0000 UTC Normal Pod check-span-kjhfh.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:32 +0000 UTC Normal Pod check-span-kjhfh.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:32 +0000 UTC Normal Pod report-span-jcspv AddedInterface Add eth0 [10.131.0.53/23] from ovn-kubernetes logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:32 +0000 UTC Normal Pod report-span-jcspv.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:32 +0000 UTC Normal Pod report-span-jcspv.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:32 +0000 UTC Normal Pod report-span-jcspv.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:18:44 | examples-service-types | 2023-12-11 07:18:43 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:18:44 | examples-service-types | Deleting namespace: kuttl-test-crack-gannet === CONT kuttl/harness/examples-with-sampling logger.go:42: 07:19:15 | examples-with-sampling | Creating namespace: kuttl-test-deciding-stag logger.go:42: 
07:19:15 | examples-with-sampling/0-install | starting test step 0-install logger.go:42: 07:19:15 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 07:19:15 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:19:15 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-deciding-stag logger.go:42: 07:19:15 | examples-with-sampling/0-install | kubectl create namespace kuttl-test-deciding-stag 2>&1 | grep -v "already exists" || true logger.go:42: 07:19:15 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-deciding-stag 2>&1 | grep -v "already exists" || true logger.go:42: 07:19:16 | examples-with-sampling/0-install | service/cassandra created logger.go:42: 07:19:16 | examples-with-sampling/0-install | statefulset.apps/cassandra created logger.go:42: 07:19:16 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:19:27 | examples-with-sampling/0-install | test step completed 0-install logger.go:42: 07:19:27 | examples-with-sampling/1-install | starting test step 1-install logger.go:42: 07:19:27 | examples-with-sampling/1-install | Jaeger:kuttl-test-deciding-stag/with-sampling created logger.go:42: 07:19:33 | examples-with-sampling/1-install | test step completed 1-install logger.go:42: 07:19:33 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:19:33 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null] logger.go:42: 07:19:35 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
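The make cassandra target logged above reduces to two idempotent kubectl create calls; the 2>&1 | grep -v "already exists" || true pipeline makes reruns safe. The step is only marked complete once the pods come up, which the harness detects via its assert files. Outside the harness, an explicit readiness check could look like this (the rollout command is an assumption, not part of the suite):

kubectl create -f ./tests/cassandra.yml --namespace "$NAMESPACE" 2>&1 | grep -v "already exists" || true
# block until both replicas of the cassandra StatefulSet report ready
kubectl rollout status statefulset/cassandra -n "$NAMESPACE" --timeout=120s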
logger.go:42: 07:19:42 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:19:42 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:19:43 | examples-with-sampling/2-smoke-test | job.batch/report-span created logger.go:42: 07:19:43 | examples-with-sampling/2-smoke-test | job.batch/check-span created logger.go:42: 07:19:55 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:19:55 | examples-with-sampling/3- | starting test step 3- logger.go:42: 07:19:55 | examples-with-sampling/3- | test step completed 3- logger.go:42: 07:19:55 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-deciding-stag: logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:16 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-deciding-stag/cassandra-0 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:16 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:16 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:16 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:20 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 4.07s (4.07s including waiting) kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:20 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:20 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:20 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-deciding-stag/cassandra-1 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:20 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:21 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.54/23] from ovn-kubernetes logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:21 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:25 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 4.347s (4.347s including 
waiting) kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:25 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:25 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:30 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf Binding Scheduled Successfully assigned kuttl-test-deciding-stag/with-sampling-558d848cf-6t2zf to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:30 +0000 UTC Normal ReplicaSet.apps with-sampling-558d848cf SuccessfulCreate Created pod: with-sampling-558d848cf-6t2zf replicaset-controller logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:30 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-558d848cf to 1 deployment-controller logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:31 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf AddedInterface Add eth0 [10.129.2.32/23] from ovn-kubernetes logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:31 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:31 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:31 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:31 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:31 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:31 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:38 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:38 +0000 UTC Normal Pod with-sampling-558d848cf-6t2zf.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:38 +0000 UTC Normal ReplicaSet.apps with-sampling-558d848cf SuccessfulDelete Deleted pod: with-sampling-558d848cf-6t2zf replicaset-controller logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:38 +0000 UTC Normal Pod with-sampling-5ffb497df7-kwqfh Binding Scheduled Successfully assigned kuttl-test-deciding-stag/with-sampling-5ffb497df7-kwqfh to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 
07:19:55 | examples-with-sampling | 2023-12-11 07:19:38 +0000 UTC Normal ReplicaSet.apps with-sampling-5ffb497df7 SuccessfulCreate Created pod: with-sampling-5ffb497df7-kwqfh replicaset-controller logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:38 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set with-sampling-558d848cf to 0 from 1 deployment-controller logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:38 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-5ffb497df7 to 1 deployment-controller logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:39 +0000 UTC Normal Pod with-sampling-5ffb497df7-kwqfh AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:39 +0000 UTC Normal Pod with-sampling-5ffb497df7-kwqfh.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:39 +0000 UTC Normal Pod with-sampling-5ffb497df7-kwqfh.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:39 +0000 UTC Normal Pod with-sampling-5ffb497df7-kwqfh.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:39 +0000 UTC Normal Pod with-sampling-5ffb497df7-kwqfh.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:39 +0000 UTC Normal Pod with-sampling-5ffb497df7-kwqfh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:39 +0000 UTC Normal Pod with-sampling-5ffb497df7-kwqfh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod check-span-lqn6r Binding Scheduled Successfully assigned kuttl-test-deciding-stag/check-span-lqn6r to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod check-span-lqn6r AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod check-span-lqn6r.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod check-span-lqn6r.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod check-span-lqn6r.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-lqn6r job-controller 
logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod report-span-bcbbs Binding Scheduled Successfully assigned kuttl-test-deciding-stag/report-span-bcbbs to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod report-span-bcbbs AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod report-span-bcbbs.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod report-span-bcbbs.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Pod report-span-bcbbs.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:43 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-bcbbs job-controller logger.go:42: 07:19:55 | examples-with-sampling | 2023-12-11 07:19:54 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:19:55 | examples-with-sampling | Deleting namespace: kuttl-test-deciding-stag === CONT kuttl/harness/examples-with-cassandra logger.go:42: 07:20:12 | examples-with-cassandra | Creating namespace: kuttl-test-pet-cattle logger.go:42: 07:20:12 | examples-with-cassandra/0-install | starting test step 0-install logger.go:42: 07:20:12 | examples-with-cassandra/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 07:20:12 | examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:20:12 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-pet-cattle logger.go:42: 07:20:12 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-pet-cattle 2>&1 | grep -v "already exists" || true logger.go:42: 07:20:13 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-pet-cattle 2>&1 | grep -v "already exists" || true logger.go:42: 07:20:13 | examples-with-cassandra/0-install | service/cassandra created logger.go:42: 07:20:13 | examples-with-cassandra/0-install | statefulset.apps/cassandra created logger.go:42: 07:20:13 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:20:15 | examples-with-cassandra/0-install | test step completed 0-install logger.go:42: 07:20:15 | examples-with-cassandra/1-install | starting test step 1-install logger.go:42: 07:20:16 | examples-with-cassandra/1-install | Jaeger:kuttl-test-pet-cattle/with-cassandra created logger.go:42: 07:20:33 | examples-with-cassandra/1-install | test step completed 1-install logger.go:42: 07:20:33 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:20:33 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null] logger.go:42: 07:20:52 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the 
kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:21:43 | examples-with-cassandra/2-smoke-test | Unable to connect to the server: context deadline exceeded (Client.Timeout exceeded while awaiting headers)
logger.go:42: 07:21:43 | examples-with-cassandra/2-smoke-test | command failure, skipping 2 additional commands
case.go:364: failed in step 2-smoke-test
case.go:366: exit status 1
logger.go:42: 07:21:43 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-pet-cattle:
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:13 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-pet-cattle/cassandra-0 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:13 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:14 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:14 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:14 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:14 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:15 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-pet-cattle/cassandra-1 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:15 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.56/23] from ovn-kubernetes
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:15 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:15 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:15 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:15 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:19 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fklbs Binding Scheduled Successfully assigned kuttl-test-pet-cattle/with-cassandra-cassandra-schema-job-fklbs to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:19 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fklbs AddedInterface Add eth0
[10.129.2.35/23] from ovn-kubernetes logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:19 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fklbs.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.51.0" kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:19 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-fklbs job-controller logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:24 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fklbs.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.51.0" in 4.942s (4.942s including waiting) kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:24 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fklbs.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:24 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fklbs.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:31 +0000 UTC Normal Pod with-cassandra-6864d6c58b-vkhtx Binding Scheduled Successfully assigned kuttl-test-pet-cattle/with-cassandra-6864d6c58b-vkhtx to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:31 +0000 UTC Normal Pod with-cassandra-6864d6c58b-vkhtx AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:31 +0000 UTC Normal Pod with-cassandra-6864d6c58b-vkhtx.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:31 +0000 UTC Normal Pod with-cassandra-6864d6c58b-vkhtx.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:31 +0000 UTC Normal Pod with-cassandra-6864d6c58b-vkhtx.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:31 +0000 UTC Normal Pod with-cassandra-6864d6c58b-vkhtx.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:31 +0000 UTC Normal ReplicaSet.apps with-cassandra-6864d6c58b SuccessfulCreate Created pod: with-cassandra-6864d6c58b-vkhtx replicaset-controller logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:31 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:31 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-6864d6c58b to 1 deployment-controller logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:32 +0000 UTC Normal Pod 
with-cassandra-6864d6c58b-vkhtx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:32 +0000 UTC Normal Pod with-cassandra-6864d6c58b-vkhtx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:54 +0000 UTC Normal Pod with-cassandra-6864d6c58b-vkhtx.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:54 +0000 UTC Normal Pod with-cassandra-6864d6c58b-vkhtx.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:54 +0000 UTC Normal ReplicaSet.apps with-cassandra-6864d6c58b SuccessfulDelete Deleted pod: with-cassandra-6864d6c58b-vkhtx replicaset-controller logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:54 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-6864d6c58b to 0 from 1 deployment-controller logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:55 +0000 UTC Normal Pod with-cassandra-6fc4c5dffd-b464p Binding Scheduled Successfully assigned kuttl-test-pet-cattle/with-cassandra-6fc4c5dffd-b464p to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:55 +0000 UTC Normal Pod with-cassandra-6fc4c5dffd-b464p AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:55 +0000 UTC Normal Pod with-cassandra-6fc4c5dffd-b464p.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:55 +0000 UTC Normal Pod with-cassandra-6fc4c5dffd-b464p.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:55 +0000 UTC Normal ReplicaSet.apps with-cassandra-6fc4c5dffd SuccessfulCreate Created pod: with-cassandra-6fc4c5dffd-b464p replicaset-controller logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:55 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-6fc4c5dffd to 1 deployment-controller logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:56 +0000 UTC Normal Pod with-cassandra-6fc4c5dffd-b464p.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:56 +0000 UTC Normal Pod with-cassandra-6fc4c5dffd-b464p.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:56 +0000 UTC Normal Pod with-cassandra-6fc4c5dffd-b464p.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:43 | examples-with-cassandra | 2023-12-11 07:20:56 +0000 UTC Normal Pod with-cassandra-6fc4c5dffd-b464p.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:43 | examples-with-cassandra | Deleting namespace: kuttl-test-pet-cattle 
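Every passing example so far ends with the same smoke-test pattern: gomplate renders smoke-test.yaml.template into two Jobs, report-span (which, judging by its name and the collector endpoint it is given, sends a span to the collector) and check-span (which asserts the span is visible through the query endpoint), and the step passes when check-span completes. Reproducing it by hand for the with-sampling instance, using the exact variables from the logged command; the final kubectl wait line is an assumption added here, since kuttl normally does the waiting itself:

ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a \
JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 \
JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 \
MOUNT_SECRET=e2e-test \
/tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
# wait for the assertion Job instead of relying on the harness timeout
kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=300s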
=== CONT kuttl/harness/examples-with-badger-and-volume logger.go:42: 07:22:08 | examples-with-badger-and-volume | Creating namespace: kuttl-test-outgoing-civet logger.go:42: 07:22:08 | examples-with-badger-and-volume/0-install | starting test step 0-install logger.go:42: 07:22:08 | examples-with-badger-and-volume/0-install | Jaeger:kuttl-test-outgoing-civet/with-badger-and-volume created logger.go:42: 07:22:14 | examples-with-badger-and-volume/0-install | test step completed 0-install logger.go:42: 07:22:14 | examples-with-badger-and-volume/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:22:14 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger-and-volume /dev/null] logger.go:42: 07:25:43 | examples-with-badger-and-volume/1-smoke-test | Warning: resource jaegers/with-badger-and-volume is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:25:50 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:25:50 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:25:51 | examples-with-badger-and-volume/1-smoke-test | job.batch/report-span created logger.go:42: 07:25:51 | examples-with-badger-and-volume/1-smoke-test | job.batch/check-span created logger.go:42: 07:26:02 | examples-with-badger-and-volume/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:26:02 | examples-with-badger-and-volume | examples-with-badger-and-volume events from ns kuttl-test-outgoing-civet: logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:11 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd Binding Scheduled Successfully assigned kuttl-test-outgoing-civet/with-badger-and-volume-6cd54474f9-b6mbd to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:11 +0000 UTC Warning Pod with-badger-and-volume-6cd54474f9-b6mbd FailedMount MountVolume.SetUp failed for volume "with-badger-and-volume-ui-oauth-proxy-tls" : secret "with-badger-and-volume-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:11 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-6cd54474f9 SuccessfulCreate Created pod: with-badger-and-volume-6cd54474f9-b6mbd replicaset-controller logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:11 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-6cd54474f9 to 1 deployment-controller logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:12 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd 
AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:12 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:12 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:12 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:12 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:12 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:22:12 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:44 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:44 +0000 UTC Normal Pod with-badger-and-volume-6cd54474f9-b6mbd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:44 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-6cd54474f9 SuccessfulDelete Deleted pod: with-badger-and-volume-6cd54474f9-b6mbd replicaset-controller logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:44 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled down replica set with-badger-and-volume-6cd54474f9 to 0 from 1 deployment-controller logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:45 +0000 UTC Normal Pod with-badger-and-volume-5b486c86-cvzpt Binding Scheduled Successfully assigned kuttl-test-outgoing-civet/with-badger-and-volume-5b486c86-cvzpt to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:45 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-5b486c86 SuccessfulCreate Created pod: with-badger-and-volume-5b486c86-cvzpt replicaset-controller logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:45 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-5b486c86 to 1 deployment-controller logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:46 +0000 UTC Normal Pod with-badger-and-volume-5b486c86-cvzpt AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:46 
+0000 UTC Normal Pod with-badger-and-volume-5b486c86-cvzpt.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:46 +0000 UTC Normal Pod with-badger-and-volume-5b486c86-cvzpt.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:46 +0000 UTC Normal Pod with-badger-and-volume-5b486c86-cvzpt.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:46 +0000 UTC Normal Pod with-badger-and-volume-5b486c86-cvzpt.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:46 +0000 UTC Normal Pod with-badger-and-volume-5b486c86-cvzpt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:46 +0000 UTC Normal Pod with-badger-and-volume-5b486c86-cvzpt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod check-span-jb96m Binding Scheduled Successfully assigned kuttl-test-outgoing-civet/check-span-jb96m to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod check-span-jb96m AddedInterface Add eth0 [10.128.2.58/23] from ovn-kubernetes logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod check-span-jb96m.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod check-span-jb96m.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod check-span-jb96m.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-jb96m job-controller logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod report-span-cvzgq Binding Scheduled Successfully assigned kuttl-test-outgoing-civet/report-span-cvzgq to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod report-span-cvzgq AddedInterface Add eth0 [10.131.0.57/23] from ovn-kubernetes logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod report-span-cvzgq.spec.containers{report-span} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod report-span-cvzgq.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Pod report-span-cvzgq.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:25:51 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-cvzgq job-controller logger.go:42: 07:26:02 | examples-with-badger-and-volume | 2023-12-11 07:26:02 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:26:02 | examples-with-badger-and-volume | Deleting namespace: kuttl-test-outgoing-civet === CONT kuttl/harness/examples-with-badger logger.go:42: 07:26:50 | examples-with-badger | Creating namespace: kuttl-test-enough-antelope logger.go:42: 07:26:50 | examples-with-badger/0-install | starting test step 0-install logger.go:42: 07:26:51 | examples-with-badger/0-install | Jaeger:kuttl-test-enough-antelope/with-badger created logger.go:42: 07:26:57 | examples-with-badger/0-install | test step completed 0-install logger.go:42: 07:26:57 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:26:57 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null] logger.go:42: 07:27:50 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
logger.go:42: 07:30:42 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:30:43 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:30:43 | examples-with-badger/1-smoke-test | job.batch/report-span created logger.go:42: 07:30:43 | examples-with-badger/1-smoke-test | job.batch/check-span created logger.go:42: 07:30:55 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:30:55 | examples-with-badger | examples-with-badger events from ns kuttl-test-enough-antelope: logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:54 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq Binding Scheduled Successfully assigned kuttl-test-enough-antelope/with-badger-d6f9945bc-mzdpq to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:54 +0000 UTC Warning Pod with-badger-d6f9945bc-mzdpq FailedMount MountVolume.SetUp failed for volume "with-badger-ui-oauth-proxy-tls" : secret "with-badger-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:54 +0000 UTC Normal ReplicaSet.apps with-badger-d6f9945bc SuccessfulCreate Created pod: with-badger-d6f9945bc-mzdpq replicaset-controller logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:54 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-d6f9945bc to 1 deployment-controller logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:55 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:55 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:55 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:55 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:55 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:55 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:26:55 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:30:55 | 
examples-with-badger | 2023-12-11 07:27:51 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:51 +0000 UTC Normal Pod with-badger-d6f9945bc-mzdpq.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:51 +0000 UTC Normal ReplicaSet.apps with-badger-d6f9945bc SuccessfulDelete Deleted pod: with-badger-d6f9945bc-mzdpq replicaset-controller logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:51 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-d6f9945bc to 0 from 1 deployment-controller logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal Pod with-badger-fbf6d9df-lpmjp Binding Scheduled Successfully assigned kuttl-test-enough-antelope/with-badger-fbf6d9df-lpmjp to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal Pod with-badger-fbf6d9df-lpmjp AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal Pod with-badger-fbf6d9df-lpmjp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal Pod with-badger-fbf6d9df-lpmjp.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal Pod with-badger-fbf6d9df-lpmjp.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal Pod with-badger-fbf6d9df-lpmjp.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal Pod with-badger-fbf6d9df-lpmjp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal Pod with-badger-fbf6d9df-lpmjp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal ReplicaSet.apps with-badger-fbf6d9df SuccessfulCreate Created pod: with-badger-fbf6d9df-lpmjp replicaset-controller logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:27:53 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-fbf6d9df to 1 deployment-controller logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:43 +0000 UTC Normal Pod check-span-627gm Binding Scheduled Successfully assigned kuttl-test-enough-antelope/check-span-627gm to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:43 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-627gm job-controller logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:43 +0000 UTC Normal Pod report-span-4pw26 Binding Scheduled 
Successfully assigned kuttl-test-enough-antelope/report-span-4pw26 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:43 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-4pw26 job-controller logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:44 +0000 UTC Normal Pod check-span-627gm AddedInterface Add eth0 [10.128.2.59/23] from ovn-kubernetes logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:44 +0000 UTC Normal Pod check-span-627gm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:44 +0000 UTC Normal Pod check-span-627gm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:44 +0000 UTC Normal Pod check-span-627gm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:44 +0000 UTC Normal Pod report-span-4pw26 AddedInterface Add eth0 [10.131.0.59/23] from ovn-kubernetes logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:44 +0000 UTC Normal Pod report-span-4pw26.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:44 +0000 UTC Normal Pod report-span-4pw26.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:44 +0000 UTC Normal Pod report-span-4pw26.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:30:55 | examples-with-badger | 2023-12-11 07:30:55 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:30:55 | examples-with-badger | Deleting namespace: kuttl-test-enough-antelope === CONT kuttl/harness/examples-simplest logger.go:42: 07:31:08 | examples-simplest | Creating namespace: kuttl-test-together-raven logger.go:42: 07:31:08 | examples-simplest/0-install | starting test step 0-install logger.go:42: 07:31:08 | examples-simplest/0-install | Jaeger:kuttl-test-together-raven/simplest created logger.go:42: 07:31:14 | examples-simplest/0-install | test step completed 0-install logger.go:42: 07:31:14 | examples-simplest/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:31:14 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null] logger.go:42: 07:32:08 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
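
The "missing last-applied-configuration" warning in the get-token step above is benign: the Jaeger resource is created by the kuttl harness rather than by kubectl apply, so the first apply that get-token.sh runs against it finds no annotation to diff against and patches one in, exactly as the message says. A minimal sketch that reproduces the same warning with a hypothetical ConfigMap (any writable namespace works):

# Create an object imperatively; it gets no last-applied annotation.
kubectl create configmap demo -n "$NAMESPACE" --from-literal=k=v
# Round-trip it through apply; kubectl warns, then patches the annotation in.
kubectl get configmap demo -n "$NAMESPACE" -o yaml > demo.yaml
kubectl apply -f demo.yaml -n "$NAMESPACE"
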
logger.go:42: 07:32:33 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:32:37 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:32:37 | examples-simplest/1-smoke-test | job.batch/report-span created logger.go:42: 07:32:37 | examples-simplest/1-smoke-test | job.batch/check-span created logger.go:42: 07:32:50 | examples-simplest/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:32:50 | examples-simplest | examples-simplest events from ns kuttl-test-together-raven: logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:11 +0000 UTC Normal Pod simplest-64bd948564-7428c Binding Scheduled Successfully assigned kuttl-test-together-raven/simplest-64bd948564-7428c to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:11 +0000 UTC Normal ReplicaSet.apps simplest-64bd948564 SuccessfulCreate Created pod: simplest-64bd948564-7428c replicaset-controller logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:11 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-64bd948564 to 1 deployment-controller logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:12 +0000 UTC Normal Pod simplest-64bd948564-7428c AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:12 +0000 UTC Normal Pod simplest-64bd948564-7428c.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:12 +0000 UTC Normal Pod simplest-64bd948564-7428c.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:12 +0000 UTC Normal Pod simplest-64bd948564-7428c.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:12 +0000 UTC Normal Pod simplest-64bd948564-7428c.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:12 +0000 UTC Normal Pod simplest-64bd948564-7428c.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:31:12 +0000 UTC Normal Pod simplest-64bd948564-7428c.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:09 +0000 UTC Normal Pod simplest-64bd948564-7428c.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:09 +0000 UTC Normal Pod simplest-64bd948564-7428c.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet 
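
Every smoke-test step in this suite follows the render/apply sequence shown above: gomplate expands smoke-test.yaml.template (reading JAEGER_COLLECTOR_ENDPOINT, JAEGER_QUERY_ENDPOINT, ASSERT_IMG and MOUNT_SECRET from the environment) into a pair of Jobs, report-span posts a span to the collector, and check-span polls the query API until it finds it. A condensed, hedged equivalent, with kubectl wait standing in for kuttl's assert file:

# Endpoints copied from the log above; the template path is illustrative.
export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268
export JAEGER_QUERY_ENDPOINT=https://simplest-query:443
gomplate -f smoke-test.yaml.template -o smoke-test-job.yaml
kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
# The step passes once the asserting Job reports Complete.
kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=120s
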
logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:09 +0000 UTC Normal ReplicaSet.apps simplest-64bd948564 SuccessfulDelete Deleted pod: simplest-64bd948564-7428c replicaset-controller logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:09 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-64bd948564 to 0 from 1 deployment-controller logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:10 +0000 UTC Normal Pod simplest-57dff5c8fd-djzgd Binding Scheduled Successfully assigned kuttl-test-together-raven/simplest-57dff5c8fd-djzgd to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:10 +0000 UTC Normal ReplicaSet.apps simplest-57dff5c8fd SuccessfulCreate Created pod: simplest-57dff5c8fd-djzgd replicaset-controller logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:10 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-57dff5c8fd to 1 deployment-controller logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:11 +0000 UTC Normal Pod simplest-57dff5c8fd-djzgd AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:11 +0000 UTC Normal Pod simplest-57dff5c8fd-djzgd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:11 +0000 UTC Normal Pod simplest-57dff5c8fd-djzgd.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:11 +0000 UTC Normal Pod simplest-57dff5c8fd-djzgd.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:11 +0000 UTC Normal Pod simplest-57dff5c8fd-djzgd.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:11 +0000 UTC Normal Pod simplest-57dff5c8fd-djzgd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:11 +0000 UTC Normal Pod simplest-57dff5c8fd-djzgd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:37 +0000 UTC Normal Pod check-span-699wk Binding Scheduled Successfully assigned kuttl-test-together-raven/check-span-699wk to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:37 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-699wk job-controller logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:37 +0000 UTC Normal Pod report-span-jfxlh Binding Scheduled Successfully assigned kuttl-test-together-raven/report-span-jfxlh to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:37 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-jfxlh job-controller logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:38 +0000 UTC Normal Pod check-span-699wk AddedInterface Add eth0 
[10.128.2.60/23] from ovn-kubernetes logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:38 +0000 UTC Normal Pod check-span-699wk.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:38 +0000 UTC Normal Pod check-span-699wk.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:38 +0000 UTC Normal Pod check-span-699wk.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:38 +0000 UTC Normal Pod report-span-jfxlh AddedInterface Add eth0 [10.131.0.60/23] from ovn-kubernetes logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:38 +0000 UTC Normal Pod report-span-jfxlh.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:38 +0000 UTC Normal Pod report-span-jfxlh.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:38 +0000 UTC Normal Pod report-span-jfxlh.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:32:50 | examples-simplest | 2023-12-11 07:32:49 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:32:50 | examples-simplest | Deleting namespace: kuttl-test-together-raven === CONT kuttl/harness/examples-simple-prod-with-volumes logger.go:42: 07:33:02 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:33:07 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-emerging-reindeer logger.go:42: 07:33:07 | examples-simple-prod-with-volumes/1-install | starting test step 1-install logger.go:42: 07:33:07 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-emerging-reindeer/simple-prod created logger.go:42: 07:33:42 | examples-simple-prod-with-volumes/1-install | test step completed 1-install logger.go:42: 07:33:42 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:33:42 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 07:34:40 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
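
The "Ignoring 03-check-volume.yaml.template" line above shows how kuttl selects step files: only names matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ are treated as steps, so the extra .template suffix keeps the file out of the harness until the render step emits a plain .yaml next to it (the 3-check-volume step does run later in this test, so a rendered file exists by then). A quick way to preview which files kuttl will execute, under the assumption of a suite layout like the one in this repo:

# List only the files kuttl will pick up as numbered steps (path illustrative).
ls tests/e2e/examples/examples-simple-prod-with-volumes \
  | grep -E '^[0-9]+-[^.]+(\.yaml)?$'
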
logger.go:42: 07:35:40 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:35:40 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:37:31 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created logger.go:42: 07:37:31 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created logger.go:42: 07:37:42 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:37:42 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume logger.go:42: 07:37:42 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data] logger.go:42: 07:37:43 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-emerging-reindeer: logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:13 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c46d47fdb SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z replicaset-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z Binding Scheduled Successfully assigned kuttl-test-emerging-reindeer/elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:13 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c46d47fdb to 1 deployment-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:14 +0000 UTC 
Normal Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:14 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:24 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:29 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestemergingreindeersimpleprod-1-6c78c6z.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal Pod simple-prod-collector-6c8d96dd4d-pnvtx Binding Scheduled Successfully assigned kuttl-test-emerging-reindeer/simple-prod-collector-6c8d96dd4d-pnvtx to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal Pod simple-prod-collector-6c8d96dd4d-pnvtx AddedInterface Add eth0 [10.131.0.61/23] from ovn-kubernetes logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal Pod simple-prod-collector-6c8d96dd4d-pnvtx.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-6c8d96dd4d SuccessfulCreate Created pod: simple-prod-collector-6c8d96dd4d-pnvtx replicaset-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-6c8d96dd4d to 1 deployment-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb Binding Scheduled Successfully assigned kuttl-test-emerging-reindeer/simple-prod-query-6fc65f97f9-2x8sb to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb AddedInterface Add eth0 [10.128.2.61/23] from ovn-kubernetes logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal Pod 
simple-prod-query-6fc65f97f9-2x8sb.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6fc65f97f9 SuccessfulCreate Created pod: simple-prod-query-6fc65f97f9-2x8sb replicaset-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:40 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6fc65f97f9 to 1 deployment-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-collector-6c8d96dd4d-pnvtx.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-collector-6c8d96dd4d-pnvtx.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:41 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:58 +0000 UTC Warning 
HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:33:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:42 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k Binding Scheduled Successfully assigned kuttl-test-emerging-reindeer/simple-prod-query-686bc65dd6-q456k to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:42 +0000 UTC Normal ReplicaSet.apps simple-prod-query-686bc65dd6 SuccessfulCreate Created pod: simple-prod-query-686bc65dd6-q456k replicaset-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:42 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:42 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:42 +0000 UTC Normal Pod simple-prod-query-6fc65f97f9-2x8sb.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:42 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6fc65f97f9 SuccessfulDelete Deleted pod: simple-prod-query-6fc65f97f9-2x8sb replicaset-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:42 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-6fc65f97f9 to 0 from 1 deployment-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:42 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-686bc65dd6 to 1 deployment-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod simple-prod-collector-6c8d96dd4d-pnvtx horizontal-pod-autoscaler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) 
horizontal-pod-autoscaler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k AddedInterface Add eth0 [10.128.2.62/23] from ovn-kubernetes logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:34:43 +0000 UTC Normal Pod simple-prod-query-686bc65dd6-q456k.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod check-span-wrqvk Binding Scheduled Successfully assigned kuttl-test-emerging-reindeer/check-span-wrqvk to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod check-span-wrqvk AddedInterface Add eth0 [10.131.0.63/23] from ovn-kubernetes logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod check-span-wrqvk.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod check-span-wrqvk.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 
07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod check-span-wrqvk.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-wrqvk job-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod report-span-zxvlh Binding Scheduled Successfully assigned kuttl-test-emerging-reindeer/report-span-zxvlh to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod report-span-zxvlh AddedInterface Add eth0 [10.131.0.62/23] from ovn-kubernetes logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod report-span-zxvlh.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod report-span-zxvlh.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Pod report-span-zxvlh.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:31 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-zxvlh job-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | 2023-12-11 07:37:42 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:37:43 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-emerging-reindeer === CONT kuttl/harness/examples-simple-prod logger.go:42: 07:39:14 | examples-simple-prod | Creating namespace: kuttl-test-wealthy-sunbeam logger.go:42: 07:39:14 | examples-simple-prod/1-install | starting test step 1-install logger.go:42: 07:39:14 | examples-simple-prod/1-install | Jaeger:kuttl-test-wealthy-sunbeam/simple-prod created logger.go:42: 07:39:50 | examples-simple-prod/1-install | test step completed 1-install logger.go:42: 07:39:50 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:39:50 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 07:41:12 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
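
The 3-check-volume step of the previous test is a reusable pattern: select the collector pod by labels, extract its name with yq, then kubectl exec into it to verify that the volume declared in the Jaeger spec is really mounted. A hedged equivalent that drops the yq dependency in favor of jsonpath:

# Grab the first collector pod's name via the same label selectors.
POD=$(kubectl get pods -n "$NAMESPACE" \
  -l app=jaeger -l app.kubernetes.io/component=collector \
  -o jsonpath='{.items[0].metadata.name}')
# The step passes as long as the path exists inside the container
# (ls exits non-zero, failing the step, if the mount is missing).
kubectl exec "$POD" -n "$NAMESPACE" -- ls /usr/share/elasticsearch/data
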
logger.go:42: 07:42:13 | examples-simple-prod/2-smoke-test | Unable to connect to the server: context deadline exceeded (Client.Timeout exceeded while awaiting headers) logger.go:42: 07:42:13 | examples-simple-prod/2-smoke-test | command failure, skipping 2 additional commands case.go:364: failed in step 2-smoke-test case.go:366: exit status 1 logger.go:42: 07:42:13 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-wealthy-sunbeam: logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:21 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f878c8b4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6 replicaset-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6 Binding Scheduled Successfully assigned kuttl-test-wealthy-sunbeam/elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:21 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f878c8b4 to 1 deployment-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6 AddedInterface Add eth0 [10.129.2.45/23] from ovn-kubernetes logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 
07:42:13 | examples-simple-prod | 2023-12-11 07:39:37 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestwealthysunbeamsimpleprod-1-66f8wgwd6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Pod simple-prod-collector-5589475d85-8jd8l Binding Scheduled Successfully assigned kuttl-test-wealthy-sunbeam/simple-prod-collector-5589475d85-8jd8l to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Pod simple-prod-collector-5589475d85-8jd8l AddedInterface Add eth0 [10.131.0.64/23] from ovn-kubernetes logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Pod simple-prod-collector-5589475d85-8jd8l.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Pod simple-prod-collector-5589475d85-8jd8l.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Pod simple-prod-collector-5589475d85-8jd8l.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5589475d85 SuccessfulCreate Created pod: simple-prod-collector-5589475d85-8jd8l replicaset-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5589475d85 to 1 deployment-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2 Binding Scheduled Successfully assigned kuttl-test-wealthy-sunbeam/simple-prod-query-9d6956bdf-2gnj2 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2 AddedInterface Add eth0 [10.128.2.63/23] from ovn-kubernetes logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal ReplicaSet.apps simple-prod-query-9d6956bdf SuccessfulCreate Created pod: simple-prod-query-9d6956bdf-2gnj2 replicaset-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:48 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-9d6956bdf to 1 deployment-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:49 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:49 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet 
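
The failed case above is the only failure in this excerpt: the get-token step hit the client timeout ("Unable to connect to the server: context deadline exceeded"), so kuttl aborted the step, skipped the remaining two commands, and failed examples-simple-prod even though the deployment events below look healthy. A hedged first-pass diagnostic for this symptom, meant to be run in-cluster (the service DNS name is copied from the log and resolves only inside the cluster, e.g. from a debug pod):

# Is the API server itself slow, or just the query endpoint?
kubectl version --request-timeout=5s || echo "API server unreachable"
# Probe the query service with an explicit timeout instead of kubectl's.
for i in $(seq 1 10); do
  curl -sk -o /dev/null -w '%{http_code}\n' --max-time 5 \
    https://simple-prod-query:443 && break
  sleep 5
done
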
logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:49 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:49 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:49 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:49 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:49 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:39:49 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:40:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:40:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:40:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:40:33 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:40:33 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod simple-prod-collector-5589475d85-8jd8l horizontal-pod-autoscaler logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:40:33 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:42:13 | examples-simple-prod 
| 2023-12-11 07:41:14 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr Binding Scheduled Successfully assigned kuttl-test-wealthy-sunbeam/simple-prod-query-589f896bc5-h2bgr to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-589f896bc5 SuccessfulCreate Created pod: simple-prod-query-589f896bc5-h2bgr replicaset-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:14 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:14 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:14 +0000 UTC Normal Pod simple-prod-query-9d6956bdf-2gnj2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-9d6956bdf SuccessfulDelete Deleted pod: simple-prod-query-9d6956bdf-2gnj2 replicaset-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:14 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-9d6956bdf to 0 from 1 deployment-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:14 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-589f896bc5 to 1 deployment-controller logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr AddedInterface Add eth0 [10.131.0.65/23] from ovn-kubernetes logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr.spec.containers{jaeger-agent} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:42:13 | examples-simple-prod | 2023-12-11 07:41:15 +0000 UTC Normal Pod simple-prod-query-589f896bc5-h2bgr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:42:13 | examples-simple-prod | Deleting namespace: kuttl-test-wealthy-sunbeam === CONT kuttl/harness/examples-business-application-injected-sidecar logger.go:42: 07:42:19 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-settling-polecat logger.go:42: 07:42:19 | examples-business-application-injected-sidecar/0-install | starting test step 0-install logger.go:42: 07:42:19 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-settling-polecat/myapp created logger.go:42: 07:42:19 | examples-business-application-injected-sidecar/0-install | test step completed 0-install logger.go:42: 07:42:19 | examples-business-application-injected-sidecar/1-install | starting test step 1-install logger.go:42: 07:42:20 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-settling-polecat/simplest created logger.go:42: 07:42:31 | examples-business-application-injected-sidecar/1-install | test step completed 1-install logger.go:42: 07:42:31 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:42:31 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null] logger.go:42: 07:44:00 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
logger.go:42: 07:44:06 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:44:07 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:44:07 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created logger.go:42: 07:44:07 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created logger.go:42: 07:44:19 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-settling-polecat: logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:19 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulCreate Created pod: myapp-679f79d5f8-vrw76 replicaset-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:19 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-679f79d5f8 to 1 deployment-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:20 +0000 UTC Normal Pod myapp-679f79d5f8-vrw76 Binding Scheduled Successfully assigned kuttl-test-settling-polecat/myapp-679f79d5f8-vrw76 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:20 +0000 UTC Normal Pod myapp-679f79d5f8-vrw76 AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:20 +0000 UTC Normal Pod myapp-679f79d5f8-vrw76.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:20 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb Binding Scheduled Successfully assigned kuttl-test-settling-polecat/myapp-85f7d64b4d-z99mb to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:20 +0000 UTC Normal ReplicaSet.apps myapp-85f7d64b4d SuccessfulCreate Created pod: myapp-85f7d64b4d-z99mb replicaset-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:20 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-85f7d64b4d to 1 deployment-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:21 +0000 UTC Warning Pod myapp-85f7d64b4d-z99mb FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:21 +0000 UTC Warning Pod myapp-85f7d64b4d-z99mb FailedMount MountVolume.SetUp failed for volume 
"simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:24 +0000 UTC Normal Pod myapp-679f79d5f8-vrw76.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.723s (3.723s including waiting) kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:24 +0000 UTC Normal Pod myapp-679f79d5f8-vrw76.spec.containers{myapp} Created Created container myapp kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:24 +0000 UTC Normal Pod myapp-679f79d5f8-vrw76.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:28 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb AddedInterface Add eth0 [10.131.0.66/23] from ovn-kubernetes logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:28 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:28 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh Binding Scheduled Successfully assigned kuttl-test-settling-polecat/simplest-7f9568db6b-xd7nh to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:28 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh AddedInterface Add eth0 [10.129.2.47/23] from ovn-kubernetes logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:28 +0000 UTC Normal ReplicaSet.apps simplest-7f9568db6b SuccessfulCreate Created pod: simplest-7f9568db6b-xd7nh replicaset-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:28 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-7f9568db6b to 1 deployment-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:29 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:29 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:29 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:29 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:29 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar 
| 2023-12-11 07:42:29 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:31 +0000 UTC Warning Pod myapp-679f79d5f8-vrw76.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.129.2.46:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:32 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.798s (3.798s including waiting) kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:32 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb.spec.containers{myapp} Created Created container myapp kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:32 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:32 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:32 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:32 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:33 +0000 UTC Normal Pod myapp-679f79d5f8-vrw76.spec.containers{myapp} Killing Stopping container myapp kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:33 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulDelete Deleted pod: myapp-679f79d5f8-vrw76 replicaset-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:33 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-679f79d5f8 to 0 from 1 deployment-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:39 +0000 UTC Warning Pod myapp-85f7d64b4d-z99mb.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.131.0.66:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:42:59 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb.spec.containers{myapp} Killing Container myapp failed liveness probe, will be restarted kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:43:00 +0000 UTC Normal Pod myapp-85f7d64b4d-z99mb.spec.containers{myapp} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:01 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh.spec.containers{jaeger} 
Killing Stopping container jaeger kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:01 +0000 UTC Normal Pod simplest-7f9568db6b-xd7nh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:01 +0000 UTC Normal ReplicaSet.apps simplest-7f9568db6b SuccessfulDelete Deleted pod: simplest-7f9568db6b-xd7nh replicaset-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:01 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-7f9568db6b to 0 from 1 deployment-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:02 +0000 UTC Normal Pod simplest-6f9d6db67d-rl7k8 Binding Scheduled Successfully assigned kuttl-test-settling-polecat/simplest-6f9d6db67d-rl7k8 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:02 +0000 UTC Normal Pod simplest-6f9d6db67d-rl7k8 AddedInterface Add eth0 [10.129.2.48/23] from ovn-kubernetes logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:02 +0000 UTC Normal Pod simplest-6f9d6db67d-rl7k8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:02 +0000 UTC Normal ReplicaSet.apps simplest-6f9d6db67d SuccessfulCreate Created pod: simplest-6f9d6db67d-rl7k8 replicaset-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:02 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-6f9d6db67d to 1 deployment-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:03 +0000 UTC Normal Pod simplest-6f9d6db67d-rl7k8.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:03 +0000 UTC Normal Pod simplest-6f9d6db67d-rl7k8.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:03 +0000 UTC Normal Pod simplest-6f9d6db67d-rl7k8.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:03 +0000 UTC Normal Pod simplest-6f9d6db67d-rl7k8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:03 +0000 UTC Normal Pod simplest-6f9d6db67d-rl7k8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:07 +0000 UTC Normal Pod check-span-kbrnm Binding Scheduled Successfully assigned kuttl-test-settling-polecat/check-span-kbrnm to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:44:19 | 
examples-business-application-injected-sidecar | 2023-12-11 07:44:07 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-kbrnm job-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:07 +0000 UTC Normal Pod report-span-6j696 Binding Scheduled Successfully assigned kuttl-test-settling-polecat/report-span-6j696 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:07 +0000 UTC Normal Pod report-span-6j696 AddedInterface Add eth0 [10.128.2.64/23] from ovn-kubernetes logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:07 +0000 UTC Normal Pod report-span-6j696.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:07 +0000 UTC Normal Pod report-span-6j696.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:07 +0000 UTC Normal Pod report-span-6j696.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:07 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-6j696 job-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:08 +0000 UTC Normal Pod check-span-kbrnm AddedInterface Add eth0 [10.128.2.65/23] from ovn-kubernetes logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:08 +0000 UTC Normal Pod check-span-kbrnm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:08 +0000 UTC Normal Pod check-span-kbrnm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:08 +0000 UTC Normal Pod check-span-kbrnm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | 2023-12-11 07:44:19 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:44:19 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-settling-polecat
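Note on the test above: the replacement of replica set myapp-679f79d5f8 by myapp-85f7d64b4d, with a jaeger-agent container appearing only in the new pod, is the Jaeger Operator injecting its sidecar into the business application once the "simplest" Jaeger instance came up. A minimal sketch of how a Deployment opts in to that injection; the deployment name and namespace here are placeholders, not the exact manifest this test ships:

    # Sketch: the operator injects a jaeger-agent sidecar into Deployments that
    # carry this annotation ("true" binds to the only Jaeger instance in scope;
    # a specific instance name can be given instead).
    kubectl patch deployment myapp -n "$NAMESPACE" --type merge \
      -p '{"metadata":{"annotations":{"sidecar.jaegertracing.io/inject":"true"}}}'

The intermittent "Liveness probe failed" events for the myapp pods are the example application being slow to answer HTTP during startup; kubelet restarts the container and the test continues once the probe passes.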
=== CONT kuttl/harness/examples-openshift-with-htpasswd
logger.go:42: 07:44:31 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:44:31 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:44:36 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-relieved-kiwi logger.go:42: 07:44:36 | examples-openshift-with-htpasswd/0-install | starting test step 0-install logger.go:42: 07:44:36 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-relieved-kiwi/htpasswd created logger.go:42: 07:44:36 | examples-openshift-with-htpasswd/0-install | test step completed 0-install logger.go:42: 07:44:36 | examples-openshift-with-htpasswd/1-install | starting test step 1-install logger.go:42: 07:44:36 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-relieved-kiwi/with-htpasswd created logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/1-install | test step completed 1-install logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh] logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0 logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd] logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret logger.go:42: 07:44:42 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:44:43 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 07:44:43 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:44:43 | examples-openshift-with-htpasswd/2-check-unsecured | HTTP response is 503. 403 expected.
Waiting 10 s logger.go:42: 07:44:53 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 3/30 the https://with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh] logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0 logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd] logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh] logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0 logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd] logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-relieved-kiwi.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly logger.go:42: 07:44:54 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-relieved-kiwi: logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:39 +0000 UTC Normal Pod with-htpasswd-668d7d84c9-b26z4 Binding Scheduled Successfully assigned kuttl-test-relieved-kiwi/with-htpasswd-668d7d84c9-b26z4 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:39 +0000 UTC Normal ReplicaSet.apps with-htpasswd-668d7d84c9 SuccessfulCreate Created pod: with-htpasswd-668d7d84c9-b26z4 replicaset-controller logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:39 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-668d7d84c9 to 1 deployment-controller logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:40 +0000 UTC Normal Pod with-htpasswd-668d7d84c9-b26z4 AddedInterface Add eth0 [10.129.2.49/23] from ovn-kubernetes logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:40 +0000 UTC Normal Pod with-htpasswd-668d7d84c9-b26z4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:40 +0000 UTC Normal Pod with-htpasswd-668d7d84c9-b26z4.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:40 +0000 UTC Normal Pod with-htpasswd-668d7d84c9-b26z4.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:40 +0000 UTC Normal Pod with-htpasswd-668d7d84c9-b26z4.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:40 +0000 UTC Normal Pod with-htpasswd-668d7d84c9-b26z4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | 2023-12-11 07:44:40 +0000 UTC Normal Pod with-htpasswd-668d7d84c9-b26z4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:44:54 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-relieved-kiwi
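The three check steps of this test exercise the oauth-proxy in front of the UI route: an anonymous request and a request with wrong credentials (wronguser/wrongpassword) must both return 403, while the htpasswd user (awesomeuser) gets 200. A rough shell equivalent of the assertion made by assert-jaeger-http-code.sh, reusing the route lookup from the logged commands; the real script adds the retry loop, insecure-mode fallback, and secret handling seen above:

    # Sketch: assert an expected HTTP status from the Jaeger UI route.
    HOST=$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE")
    CODE=$(curl -k -s -o /dev/null -w '%{http_code}' \
      -u "$JAEGER_USERNAME:$JAEGER_PASSWORD" "https://$HOST/search")
    test "$CODE" = "$EXPECTED_CODE"   # 403 for missing/bad credentials, 200 for the htpasswd user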
=== CONT kuttl/harness/examples-openshift-agent-as-daemonset
logger.go:42: 07:45:00 | examples-openshift-agent-as-daemonset | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:45:02 | examples-openshift-agent-as-daemonset | Creating namespace: kuttl-test-sweeping-gnat logger.go:42: 07:45:02 | examples-openshift-agent-as-daemonset/0-install | starting test step 0-install logger.go:42: 07:45:02 | examples-openshift-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 07:45:02 | examples-openshift-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-sweeping-gnat/jaeger-agent-daemonset created logger.go:42: 07:45:02 | examples-openshift-agent-as-daemonset/0-install | test step completed 0-install logger.go:42: 07:45:02 | examples-openshift-agent-as-daemonset/1-add-policy | starting test step 1-add-policy logger.go:42: 07:45:02 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset] logger.go:42: 07:45:02 | examples-openshift-agent-as-daemonset/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset" logger.go:42: 07:45:02 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c sleep 5] logger.go:42: 07:45:07 | examples-openshift-agent-as-daemonset/1-add-policy | test step completed 1-add-policy logger.go:42: 07:45:07 | examples-openshift-agent-as-daemonset/2-install | starting test step 2-install logger.go:42: 07:45:08 | examples-openshift-agent-as-daemonset/2-install | Jaeger:kuttl-test-sweeping-gnat/agent-as-daemonset created logger.go:42: 07:45:13 | examples-openshift-agent-as-daemonset/2-install | test step completed 2-install logger.go:42: 07:45:13 | examples-openshift-agent-as-daemonset/3-install | starting test step 3-install logger.go:42: 07:45:13 | examples-openshift-agent-as-daemonset/3-install | Deployment:kuttl-test-sweeping-gnat/vertx-create-span-sidecar created logger.go:42: 07:45:14 | examples-openshift-agent-as-daemonset/3-install | test step completed 3-install logger.go:42: 07:45:14 | examples-openshift-agent-as-daemonset/4-find-service | starting test step 4-find-service logger.go:42: 07:45:14 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 07:46:35 | examples-openshift-agent-as-daemonset/4-find-service | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
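The kubectl warning above is expected and harmless: the Jaeger resource was created without the kubectl.kubernetes.io/last-applied-configuration annotation, so the first kubectl apply against it has no recorded base for its three-way merge and patches the annotation in on the fly. Creating the resource with --save-config in the first place avoids the warning; a sketch with a placeholder manifest name:

    # Sketch: record the applied configuration at creation time so later
    # "kubectl apply" calls can compute their three-way merge silently.
    kubectl create --save-config -f jaeger.yaml -n "$NAMESPACE"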
logger.go:42: 07:46:42 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_NAME=order ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JOB_NUMBER=00 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o find-service-00-job.yaml] logger.go:42: 07:46:44 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c kubectl create -f find-service-00-job.yaml -n $NAMESPACE] logger.go:42: 07:48:03 | examples-openshift-agent-as-daemonset/4-find-service | job.batch/00-find-service created logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset/4-find-service | test step completed 4-find-service logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | examples-openshift-agent-as-daemonset events from ns kuttl-test-sweeping-gnat: logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:11 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt Binding Scheduled Successfully assigned kuttl-test-sweeping-gnat/agent-as-daemonset-7b867656dc-46vtt to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:11 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:11 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:11 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:11 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:11 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:11 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-7b867656dc SuccessfulCreate Created pod: agent-as-daemonset-7b867656dc-46vtt replicaset-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:11 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-7b867656dc to 1 deployment-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:12 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:12 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 
07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-fbmbv Binding Scheduled Successfully assigned kuttl-test-sweeping-gnat/agent-as-daemonset-agent-daemonset-fbmbv to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-fbmbv AddedInterface Add eth0 [10.128.2.66/23] from ovn-kubernetes logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-htbcp Binding Scheduled Successfully assigned kuttl-test-sweeping-gnat/agent-as-daemonset-agent-daemonset-htbcp to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-htbcp AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-k675j Binding Scheduled Successfully assigned kuttl-test-sweeping-gnat/agent-as-daemonset-agent-daemonset-k675j to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-k675j AddedInterface Add eth0 [10.131.0.69/23] from ovn-kubernetes logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-htbcp daemonset-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-k675j daemonset-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-fbmbv daemonset-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9p56l Binding Scheduled Successfully assigned kuttl-test-sweeping-gnat/vertx-create-span-sidecar-6c569f6fc6-9p56l to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6c569f6fc6 SuccessfulCreate Created pod: vertx-create-span-sidecar-6c569f6fc6-9p56l replicaset-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6c569f6fc6 to 1 deployment-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-fbmbv.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 
07:45:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-fbmbv.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-fbmbv.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-htbcp.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-htbcp.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-htbcp.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-k675j.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-k675j.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-k675j.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9p56l AddedInterface Add eth0 [10.131.0.68/23] from ovn-kubernetes logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9p56l.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9p56l.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:14 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9p56l.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:23 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9p56l.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.68:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
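The Unhealthy events here and below are a recurring startup pattern for the vertx-create-span fixture: the app is slow to serve HTTP on port 8080 right after starting, so kubelet fails the liveness and readiness probes and restarts the container, after which the pod settles and the test proceeds. Outside a test fixture the usual remedy is to give the probes more slack; the values below are illustrative only, not what this manifest actually sets:

    # Sketch: loosen the liveness probe of a slow-starting container
    # (assumes the probed container is at index 0 in the pod spec).
    kubectl patch deployment vertx-create-span-sidecar -n "$NAMESPACE" --type json -p '[
      {"op": "add", "path": "/spec/template/spec/containers/0/livenessProbe/initialDelaySeconds", "value": 30},
      {"op": "add", "path": "/spec/template/spec/containers/0/livenessProbe/failureThreshold", "value": 6}
    ]'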
logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:23 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9p56l.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:25 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9p56l.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:25 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9p56l.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": read tcp 10.131.0.2:35414->10.131.0.68:8080: read: connection reset by peer kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:25 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9p56l.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": dial tcp 10.131.0.68:8080: connect: connection refused kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:45:36 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9p56l.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.68:8080/": read tcp 10.131.0.2:60992->10.131.0.68:8080: read: connection reset by peer kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:36 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:36 +0000 UTC Normal Pod agent-as-daemonset-7b867656dc-46vtt.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:36 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-7b867656dc SuccessfulDelete Deleted pod: agent-as-daemonset-7b867656dc-46vtt replicaset-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:36 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-7b867656dc to 0 from 1 deployment-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:37 +0000 UTC Normal Pod agent-as-daemonset-7cddb64c57-6m4ct Binding Scheduled Successfully assigned kuttl-test-sweeping-gnat/agent-as-daemonset-7cddb64c57-6m4ct to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:37 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-7cddb64c57 SuccessfulCreate Created pod: agent-as-daemonset-7cddb64c57-6m4ct replicaset-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:37 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-7cddb64c57 to 1 deployment-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:38 +0000 UTC Normal Pod agent-as-daemonset-7cddb64c57-6m4ct AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:38 +0000 UTC Normal Pod
agent-as-daemonset-7cddb64c57-6m4ct.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:38 +0000 UTC Normal Pod agent-as-daemonset-7cddb64c57-6m4ct.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:38 +0000 UTC Normal Pod agent-as-daemonset-7cddb64c57-6m4ct.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:38 +0000 UTC Normal Pod agent-as-daemonset-7cddb64c57-6m4ct.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:38 +0000 UTC Normal Pod agent-as-daemonset-7cddb64c57-6m4ct.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:46:38 +0000 UTC Normal Pod agent-as-daemonset-7cddb64c57-6m4ct.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:48:03 +0000 UTC Normal Pod 00-find-service-gjff9 Binding Scheduled Successfully assigned kuttl-test-sweeping-gnat/00-find-service-gjff9 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:48:03 +0000 UTC Normal Pod 00-find-service-gjff9 AddedInterface Add eth0 [10.128.2.67/23] from ovn-kubernetes logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:48:03 +0000 UTC Normal Pod 00-find-service-gjff9.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:48:03 +0000 UTC Normal Pod 00-find-service-gjff9.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:48:03 +0000 UTC Normal Pod 00-find-service-gjff9.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:48:03 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-gjff9 job-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | 2023-12-11 07:48:06 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 07:48:06 | examples-openshift-agent-as-daemonset | Deleting namespace: kuttl-test-sweeping-gnat
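This example needed two extra setup steps because the agent runs as a DaemonSet that binds host ports, which the default restricted SCC on OpenShift does not allow: step 0 created a dedicated SecurityContextConstraints object plus a service account, and step 1 granted that SCC to the service account. The grant command is repeated below verbatim from the log; the SCC sketch shows only the field the grant exists for, while the test's real manifest sets more:

    # Grant the custom SCC to the DaemonSet's service account (as run in step 1-add-policy):
    oc adm policy --namespace "$NAMESPACE" add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset

    # Sketch of the SCC's key field (remaining required SCC fields omitted here):
    #   apiVersion: security.openshift.io/v1
    #   kind: SecurityContextConstraints
    #   metadata:
    #     name: daemonset-with-hostport
    #   allowHostPorts: true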
=== CONT kuttl/harness/examples-collector-with-priority-class
logger.go:42: 07:48:18 | examples-collector-with-priority-class | Creating namespace: kuttl-test-assuring-eagle logger.go:42: 07:48:18 | examples-collector-with-priority-class/0-install | starting test step 0-install logger.go:42: 07:48:18 | examples-collector-with-priority-class/0-install | PriorityClass:/collector-high-priority created logger.go:42: 07:48:18 | examples-collector-with-priority-class/0-install | Jaeger:kuttl-test-assuring-eagle/collector-with-high-priority created logger.go:42: 07:48:23 | examples-collector-with-priority-class/0-install | test step completed 0-install logger.go:42: 07:48:23 | examples-collector-with-priority-class/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:48:23 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE collector-with-high-priority /dev/null] logger.go:42: 07:48:24 | examples-collector-with-priority-class/1-smoke-test | Warning: resource jaegers/collector-with-high-priority is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:48:31 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:48:31 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:48:32 | examples-collector-with-priority-class/1-smoke-test | job.batch/report-span created logger.go:42: 07:48:32 | examples-collector-with-priority-class/1-smoke-test | job.batch/check-span created logger.go:42: 07:48:43 | examples-collector-with-priority-class/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:48:43 | examples-collector-with-priority-class | examples-collector-with-priority-class events from ns kuttl-test-assuring-eagle: logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:21 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr Binding Scheduled Successfully assigned kuttl-test-assuring-eagle/collector-with-high-priority-68bc9c7d9f-jlkmr to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:21 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-68bc9c7d9f SuccessfulCreate Created pod: collector-with-high-priority-68bc9c7d9f-jlkmr replicaset-controller logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:21 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-68bc9c7d9f to 1 deployment-controller logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:22 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr AddedInterface Add eth0 [10.129.2.53/23] from ovn-kubernetes logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:22 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr.spec.containers{jaeger} Pulled Container image
"registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:22 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:22 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:22 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:22 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:22 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:29 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:29 +0000 UTC Normal Pod collector-with-high-priority-68bc9c7d9f-jlkmr.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:29 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-68bc9c7d9f SuccessfulDelete Deleted pod: collector-with-high-priority-68bc9c7d9f-jlkmr replicaset-controller logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:29 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled down replica set collector-with-high-priority-68bc9c7d9f to 0 from 1 deployment-controller logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal Pod collector-with-high-priority-5444f6db85-tzpnt Binding Scheduled Successfully assigned kuttl-test-assuring-eagle/collector-with-high-priority-5444f6db85-tzpnt to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal Pod collector-with-high-priority-5444f6db85-tzpnt AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal Pod collector-with-high-priority-5444f6db85-tzpnt.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal Pod collector-with-high-priority-5444f6db85-tzpnt.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:48:43 | 
examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal Pod collector-with-high-priority-5444f6db85-tzpnt.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal Pod collector-with-high-priority-5444f6db85-tzpnt.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal Pod collector-with-high-priority-5444f6db85-tzpnt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal Pod collector-with-high-priority-5444f6db85-tzpnt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-5444f6db85 SuccessfulCreate Created pod: collector-with-high-priority-5444f6db85-tzpnt replicaset-controller logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:30 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-5444f6db85 to 1 deployment-controller logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod check-span-stzt5 Binding Scheduled Successfully assigned kuttl-test-assuring-eagle/check-span-stzt5 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod check-span-stzt5 AddedInterface Add eth0 [10.128.2.68/23] from ovn-kubernetes logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod check-span-stzt5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod check-span-stzt5.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod check-span-stzt5.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-stzt5 job-controller logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod report-span-lzzrp Binding Scheduled Successfully assigned kuttl-test-assuring-eagle/report-span-lzzrp to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod report-span-lzzrp AddedInterface Add eth0 [10.131.0.70/23] from ovn-kubernetes logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod report-span-lzzrp.spec.containers{report-span} Pulled 
Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod report-span-lzzrp.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Pod report-span-lzzrp.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:32 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-lzzrp job-controller logger.go:42: 07:48:43 | examples-collector-with-priority-class | 2023-12-11 07:48:42 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:48:43 | examples-collector-with-priority-class | Deleting namespace: kuttl-test-assuring-eagle === CONT kuttl/harness/examples-all-in-one-with-options logger.go:42: 07:48:54 | examples-all-in-one-with-options | Creating namespace: kuttl-test-patient-giraffe logger.go:42: 07:48:54 | examples-all-in-one-with-options/0-install | starting test step 0-install logger.go:42: 07:48:55 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-patient-giraffe/my-jaeger created logger.go:42: 07:49:01 | examples-all-in-one-with-options/0-install | test step completed 0-install logger.go:42: 07:49:01 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:49:01 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:49:24 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
logger.go:42: 07:49:31 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:49:31 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:49:32 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created
logger.go:42: 07:49:32 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created
logger.go:42: 07:49:43 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:49:43 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-patient-giraffe:
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:58 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk Binding Scheduled Successfully assigned kuttl-test-patient-giraffe/my-jaeger-7d9bc9bbf8-5tvwk to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:58 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:58 +0000 UTC Normal ReplicaSet.apps my-jaeger-7d9bc9bbf8 SuccessfulCreate Created pod: my-jaeger-7d9bc9bbf8-5tvwk replicaset-controller
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:58 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7d9bc9bbf8 to 1 deployment-controller
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:59 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:59 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:59 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:59 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:59 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:48:59 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:25 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:25 +0000 UTC Normal Pod my-jaeger-7d9bc9bbf8-5tvwk.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-7d9bc9bbf8 SuccessfulDelete Deleted pod: my-jaeger-7d9bc9bbf8-5tvwk replicaset-controller
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:25 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-7d9bc9bbf8 to 0 from 1 deployment-controller
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal Pod my-jaeger-54df76b7c-6hv8r Binding Scheduled Successfully assigned kuttl-test-patient-giraffe/my-jaeger-54df76b7c-6hv8r to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal Pod my-jaeger-54df76b7c-6hv8r AddedInterface Add eth0 [10.129.2.56/23] from ovn-kubernetes
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal Pod my-jaeger-54df76b7c-6hv8r.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal Pod my-jaeger-54df76b7c-6hv8r.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal Pod my-jaeger-54df76b7c-6hv8r.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal Pod my-jaeger-54df76b7c-6hv8r.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal Pod my-jaeger-54df76b7c-6hv8r.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal Pod my-jaeger-54df76b7c-6hv8r.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-54df76b7c SuccessfulCreate Created pod: my-jaeger-54df76b7c-6hv8r replicaset-controller
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:26 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-54df76b7c to 1 deployment-controller
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod check-span-cv6wz Binding Scheduled Successfully assigned kuttl-test-patient-giraffe/check-span-cv6wz to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod check-span-cv6wz AddedInterface Add eth0 [10.128.2.69/23] from ovn-kubernetes
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod check-span-cv6wz.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod check-span-cv6wz.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod check-span-cv6wz.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-cv6wz job-controller
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod report-span-xzqdv Binding Scheduled Successfully assigned kuttl-test-patient-giraffe/report-span-xzqdv to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod report-span-xzqdv AddedInterface Add eth0 [10.131.0.71/23] from ovn-kubernetes
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod report-span-xzqdv.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod report-span-xzqdv.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Pod report-span-xzqdv.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:32 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-xzqdv job-controller
logger.go:42: 07:49:43 | examples-all-in-one-with-options | 2023-12-11 07:49:43 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:49:43 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-patient-giraffe
=== CONT kuttl/harness/examples-auto-provision-kafka
logger.go:42: 07:51:26 | examples-auto-provision-kafka | Creating namespace: kuttl-test-clear-ostrich
logger.go:42: 07:51:26 | examples-auto-provision-kafka/2-install | starting test step 2-install
logger.go:42: 07:51:26 | examples-auto-provision-kafka/2-install | Jaeger:kuttl-test-clear-ostrich/auto-provision-kafka created
logger.go:42: 07:52:27 | examples-auto-provision-kafka/2-install | test step completed 2-install
logger.go:42: 07:52:27 | examples-auto-provision-kafka/3- | starting test step 3-
logger.go:42: 07:53:02 | examples-auto-provision-kafka/3- | test step completed 3-
logger.go:42: 07:53:02 | examples-auto-provision-kafka/4- | starting test step 4-
logger.go:42: 07:53:23 | examples-auto-provision-kafka/4- | test step completed 4-
logger.go:42: 07:53:23 | examples-auto-provision-kafka/5- | starting test step 5-
logger.go:42: 07:53:29 | examples-auto-provision-kafka/5- | test step completed 5-
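Each "running command" entry in this log corresponds to a command declared in the kuttl step file for that test case. A minimal sketch of what such a smoke-test step could look like, assembled from the commands logged above and below (the file layout and exact script lines are assumptions, not quoted from the jaeger-tests repo):

apiVersion: kuttl.dev/v1beta1
kind: TestStep
commands:
  # Fetch a bearer token for the secured query route (helper path as logged).
  - script: SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provision-kafka /dev/null
  # Render the report-span/check-span Job pair from the shared template.
  - script: |
      JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 \
      JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 \
      MOUNT_SECRET=e2e-test \
      /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
  # Apply the rendered Jobs into the test namespace.
  - script: kubectl apply -f smoke-test-job.yaml -n $NAMESPACE

report-span pushes a test span to the collector endpoint and check-span polls the query endpoint for it, so a completed check-span Job is the suite's signal that the whole ingestion path works.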
logger.go:42: 07:53:29 | examples-auto-provision-kafka/6-smoke-test | starting test step 6-smoke-test
logger.go:42: 07:53:29 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provision-kafka /dev/null]
logger.go:42: 07:53:31 | examples-auto-provision-kafka/6-smoke-test | Warning: resource jaegers/auto-provision-kafka is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:53:37 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:53:38 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:53:38 | examples-auto-provision-kafka/6-smoke-test | job.batch/report-span created
logger.go:42: 07:53:38 | examples-auto-provision-kafka/6-smoke-test | job.batch/check-span created
logger.go:42: 08:03:39 | examples-auto-provision-kafka/6-smoke-test | test step failed 6-smoke-test
case.go:364: failed in step 6-smoke-test
case.go:366: --- Job:kuttl-test-clear-ostrich/check-span
+++ Job:kuttl-test-clear-ostrich/check-span
@@ -1,8 +1,141 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
+  annotations:
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"check-span","namespace":"kuttl-test-clear-ostrich"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./query"],"env":[{"name":"SERVICE_NAME","value":"smoke-test-service"},{"name":"QUERY_HOST","value":"https://auto-provision-kafka-query:443"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: de94f9ac-89c1-4eea-a789-c11d87454093
+    batch.kubernetes.io/job-name: check-span
+    controller-uid: de94f9ac-89c1-4eea-a789-c11d87454093
+    job-name: check-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:parallelism: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"QUERY_HOST"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2023-12-11T07:53:38Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-12-11T08:01:57Z"
   name: check-span
   namespace: kuttl-test-clear-ostrich
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  parallelism: 1
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: de94f9ac-89c1-4eea-a789-c11d87454093
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: de94f9ac-89c1-4eea-a789-c11d87454093
+        batch.kubernetes.io/job-name: check-span
+        controller-uid: de94f9ac-89c1-4eea-a789-c11d87454093
+        job-name: check-span
+    spec:
+      containers:
+      - command:
+        - ./query
+        env:
+        - name: SERVICE_NAME
+          value: smoke-test-service
+        - name: QUERY_HOST
+          value: https://auto-provision-kafka-query:443
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a
+        imagePullPolicy: IfNotPresent
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
 status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2023-12-11T07:53:38Z"
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-clear-ostrich/check-span: .status.succeeded: key is missing from map
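The error ".status.succeeded: key is missing from map" is kuttl's way of reporting an assert mismatch: it polls the live Job and compares it against the step's assert file, and that assert evidently requires status.succeeded to equal 1, while the live object only ever reported active/ready. A minimal sketch of such an assert file (assumed shape inferred from the error, not quoted from the suite):

apiVersion: batch/v1
kind: Job
metadata:
  name: check-span
status:
  succeeded: 1      # never appeared because the check-span pod kept failing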
logger.go:42: 08:03:39 | examples-auto-provision-kafka | examples-auto-provision-kafka events from ns kuttl-test-clear-ostrich:
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:31 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-79b868bdc4 to 1 deployment-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:32 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-79b868bdc4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q replicaset-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q Binding Scheduled Successfully assigned kuttl-test-clear-ostrich/elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:42 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:47 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestclearostrichautoprovisionkaf-1-vn99q.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:59 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:59 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:59 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-clear-ostrich/data-auto-provision-kafka-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:51:59 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:04 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-clear-ostrich/auto-provision-kafka-zookeeper-0 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:04 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-242e6a0a-feef-4a56-823f-c524f3c9bf8d ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:06 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-242e6a0a-feef-4a56-823f-c524f3c9bf8d" attachdetach-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:07 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 AddedInterface Add eth0 [10.131.0.72/23] from ovn-kubernetes
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:07 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:07 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:07 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:29 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:30 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-kafka NoPods No matching pods found controllermanager
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:30 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:30 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-clear-ostrich/data-0-auto-provision-kafka-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:35 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 Binding Scheduled Successfully assigned kuttl-test-clear-ostrich/auto-provision-kafka-kafka-0 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:35 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-488a7048-c13b-45a4-bad1-7b8379948a85 ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:37 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-488a7048-c13b-45a4-bad1-7b8379948a85" attachdetach-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:41 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 AddedInterface Add eth0 [10.128.2.70/23] from ovn-kubernetes
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:41 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:41 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:52:41 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c Binding Scheduled Successfully assigned kuttl-test-clear-ostrich/auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c AddedInterface Add eth0 [10.128.2.71/23] from ovn-kubernetes
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:f1be1aa2f18276f9169893eb55e3733cd52fa38f2101a9b3925f79774841689f" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:f1be1aa2f18276f9169893eb55e3733cd52fa38f2101a9b3925f79774841689f" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-entity-operator-7c4b5dbdd9 SuccessfulCreate Created pod: auto-provision-kafka-entity-operator-7c4b5dbdd9-2lw4c replicaset-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:03 +0000 UTC Normal Deployment.apps auto-provision-kafka-entity-operator ScalingReplicaSet Scaled up replica set auto-provision-kafka-entity-operator-7c4b5dbdd9 to 1 deployment-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:25 +0000 UTC Normal Pod auto-provision-kafka-collector-7d7c8659c6-bmh6w Binding Scheduled Successfully assigned kuttl-test-clear-ostrich/auto-provision-kafka-collector-7d7c8659c6-bmh6w to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:25 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-collector-7d7c8659c6 SuccessfulCreate Created pod: auto-provision-kafka-collector-7d7c8659c6-bmh6w replicaset-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:25 +0000 UTC Normal Deployment.apps auto-provision-kafka-collector ScalingReplicaSet Scaled up replica set auto-provision-kafka-collector-7d7c8659c6 to 1 deployment-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:25 +0000 UTC Normal Pod auto-provision-kafka-ingester-747757b78d-km48j Binding Scheduled Successfully assigned kuttl-test-clear-ostrich/auto-provision-kafka-ingester-747757b78d-km48j to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:25 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-ingester-747757b78d SuccessfulCreate Created pod: auto-provision-kafka-ingester-747757b78d-km48j replicaset-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:25 +0000 UTC Normal Deployment.apps auto-provision-kafka-ingester ScalingReplicaSet Scaled up replica set auto-provision-kafka-ingester-747757b78d to 1 deployment-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:25 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v Binding Scheduled Successfully assigned kuttl-test-clear-ostrich/auto-provision-kafka-query-f4bc557df-brx7v to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:25 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-f4bc557df SuccessfulCreate Created pod: auto-provision-kafka-query-f4bc557df-brx7v replicaset-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:25 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-f4bc557df to 1 deployment-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-collector-7d7c8659c6-bmh6w AddedInterface Add eth0 [10.131.0.73/23] from ovn-kubernetes
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-collector-7d7c8659c6-bmh6w.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-collector-7d7c8659c6-bmh6w.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-collector-7d7c8659c6-bmh6w.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-ingester-747757b78d-km48j AddedInterface Add eth0 [10.131.0.75/23] from ovn-kubernetes
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-ingester-747757b78d-km48j.spec.containers{jaeger-ingester} Pulled Container image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:849018528225b7370cc4740fc9f94bef7ffd4195328a916a6013d88f885eebe2" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-ingester-747757b78d-km48j.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-ingester-747757b78d-km48j.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Warning Pod auto-provision-kafka-query-f4bc557df-brx7v FailedMount MountVolume.SetUp failed for volume "auto-provision-kafka-ui-oauth-proxy-tls" : secret "auto-provision-kafka-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v AddedInterface Add eth0 [10.131.0.74/23] from ovn-kubernetes
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:26 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:27 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:27 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:27 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:27 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:27 +0000 UTC Normal Pod auto-provision-kafka-query-f4bc557df-brx7v.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:38 +0000 UTC Normal Pod check-span-p7cdb Binding Scheduled Successfully assigned kuttl-test-clear-ostrich/check-span-p7cdb to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:38 +0000 UTC Normal Pod check-span-p7cdb AddedInterface Add eth0 [10.128.2.73/23] from ovn-kubernetes
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:38 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-p7cdb job-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:38 +0000 UTC Normal Pod report-span-5dthx Binding Scheduled Successfully assigned kuttl-test-clear-ostrich/report-span-5dthx to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:38 +0000 UTC Normal Pod report-span-5dthx AddedInterface Add eth0 [10.128.2.72/23] from ovn-kubernetes
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:38 +0000 UTC Normal Pod report-span-5dthx.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:38 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-5dthx job-controller
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:39 +0000 UTC Normal Pod check-span-p7cdb.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:39 +0000 UTC Normal Pod check-span-p7cdb.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:39 +0000 UTC Normal Pod check-span-p7cdb.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:39 +0000 UTC Normal Pod report-span-5dthx.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:39 +0000 UTC Normal Pod report-span-5dthx.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:53:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:54:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:54:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod auto-provision-kafka-collector-7d7c8659c6-bmh6w horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:54:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:54:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:54:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-ingester of Pod auto-provision-kafka-ingester-747757b78d-km48j horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:54:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:58:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod auto-provision-kafka-collector-7d7c8659c6-bmh6w horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:58:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-ingester of Pod auto-provision-kafka-ingester-747757b78d-km48j horizontal-pod-autoscaler
logger.go:42: 08:03:39 | examples-auto-provision-kafka | 2023-12-11 07:58:43 +0000 UTC Warning Pod check-span-p7cdb.spec.containers{asserts-container} BackOff Back-off restarting failed container asserts-container in pod check-span-p7cdb_kuttl-test-clear-ostrich(361fb94e-8940-4dd3-bf2f-a4f056708bc3) kubelet
logger.go:42: 08:03:39 | examples-auto-provision-kafka | Deleting namespace: kuttl-test-clear-ostrich
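The repeated HPA warnings above all stem from the same cause: the collector and ingester containers run without cpu/memory requests, so the autoscaler has no baseline against which to compute utilization. They do not fail the test by themselves, but they could be silenced by giving the containers requests through the Jaeger CR, along these lines (a sketch; the values are illustrative, not taken from this run):

apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: auto-provision-kafka
spec:
  collector:
    resources:
      requests:
        cpu: 100m      # gives the HPA a cpu baseline to compute utilization against
        memory: 128Mi
  ingester:
    resources:
      requests:
        cpu: 100m
        memory: 128Mi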
=== CONT kuttl/harness/examples-agent-with-priority-class
logger.go:42: 08:04:18 | examples-agent-with-priority-class | Creating namespace: kuttl-test-meet-marmot
logger.go:42: 08:04:18 | examples-agent-with-priority-class/0-install | starting test step 0-install
logger.go:42: 08:04:18 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 08:04:18 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-meet-marmot/jaeger-agent-daemonset created
logger.go:42: 08:04:18 | examples-agent-with-priority-class/0-install | test step completed 0-install
logger.go:42: 08:04:18 | examples-agent-with-priority-class/1-install | starting test step 1-install
logger.go:42: 08:04:18 | examples-agent-with-priority-class/1-install | PriorityClass:/high-priority created
logger.go:42: 08:04:18 | examples-agent-with-priority-class/1-install | Jaeger:kuttl-test-meet-marmot/agent-as-daemonset created
logger.go:42: 08:04:23 | examples-agent-with-priority-class/1-install | test step completed 1-install
logger.go:42: 08:04:23 | examples-agent-with-priority-class/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:04:23 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 08:06:18 | examples-agent-with-priority-class/2-smoke-test | Unable to connect to the server: context deadline exceeded (Client.Timeout exceeded while awaiting headers)
logger.go:42: 08:06:18 | examples-agent-with-priority-class/2-smoke-test | command failure, skipping 2 additional commands
case.go:364: failed in step 2-smoke-test
case.go:366: exit status 1
logger.go:42: 08:06:18 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns kuttl-test-meet-marmot:
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal Pod agent-as-daemonset-c66dd5d85-sc87k Binding Scheduled Successfully assigned kuttl-test-meet-marmot/agent-as-daemonset-c66dd5d85-sc87k to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal Pod agent-as-daemonset-c66dd5d85-sc87k AddedInterface Add eth0 [10.129.2.58/23] from ovn-kubernetes
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal Pod agent-as-daemonset-c66dd5d85-sc87k.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal Pod agent-as-daemonset-c66dd5d85-sc87k.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal Pod agent-as-daemonset-c66dd5d85-sc87k.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal Pod agent-as-daemonset-c66dd5d85-sc87k.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal Pod agent-as-daemonset-c66dd5d85-sc87k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal Pod agent-as-daemonset-c66dd5d85-sc87k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-c66dd5d85 SuccessfulCreate Created pod: agent-as-daemonset-c66dd5d85-sc87k replicaset-controller
logger.go:42: 08:06:18 | examples-agent-with-priority-class | 2023-12-11 08:04:22 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-c66dd5d85 to 1 deployment-controller
logger.go:42: 08:06:18 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-meet-marmot
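The FailedCreate event is the telling detail in this case: every SCC, including the daemonset-with-hostport one the 0-install step just created, is reported as "not usable by user or serviceaccount", which suggests the SCC was never actually granted to the jaeger-agent-daemonset service account. A sketch of an SCC that both allows the agent's host ports and names the service account directly; the users entry is the assumed missing piece, the rest mirrors what the step appears to create:

apiVersion: security.openshift.io/v1
kind: SecurityContextConstraints
metadata:
  name: daemonset-with-hostport
allowHostPorts: true           # permits hostPort 5775/5778/6831/6832/14271 on the agent pods
runAsUser:
  type: RunAsAny
seLinuxContext:
  type: RunAsAny
fsGroup:
  type: RunAsAny
supplementalGroups:
  type: RunAsAny
# Without an entry like this (or an equivalent RBAC grant of the SCC), pods running
# as the service account cannot use it, matching the "Forbidden" errors above.
users:
- system:serviceaccount:kuttl-test-meet-marmot:jaeger-agent-daemonset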
=== CONT kuttl/harness/examples-agent-as-daemonset
logger.go:42: 08:06:24 | examples-agent-as-daemonset | Creating namespace: kuttl-test-central-snapper
logger.go:42: 08:06:24 | examples-agent-as-daemonset/0-install | starting test step 0-install
logger.go:42: 08:06:24 | examples-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 08:06:24 | examples-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-central-snapper/jaeger-agent-daemonset created
logger.go:42: 08:06:24 | examples-agent-as-daemonset/0-install | test step completed 0-install
logger.go:42: 08:06:24 | examples-agent-as-daemonset/1-install | starting test step 1-install
logger.go:42: 08:06:24 | examples-agent-as-daemonset/1-install | Jaeger:kuttl-test-central-snapper/agent-as-daemonset created
logger.go:42: 08:06:30 | examples-agent-as-daemonset/1-install | test step completed 1-install
logger.go:42: 08:06:30 | examples-agent-as-daemonset/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:06:30 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 08:10:02 | examples-agent-as-daemonset/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:12:27 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:12:28 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:12:28 | examples-agent-as-daemonset/2-smoke-test | job.batch/report-span created
logger.go:42: 08:12:29 | examples-agent-as-daemonset/2-smoke-test | job.batch/check-span created
logger.go:42: 08:22:29 | examples-agent-as-daemonset/2-smoke-test | test step failed 2-smoke-test
case.go:364: failed in step 2-smoke-test
case.go:366: --- Job:kuttl-test-central-snapper/check-span
+++ Job:kuttl-test-central-snapper/check-span
@@ -1,8 +1,141 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
+  annotations:
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"check-span","namespace":"kuttl-test-central-snapper"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./query"],"env":[{"name":"SERVICE_NAME","value":"smoke-test-service"},{"name":"QUERY_HOST","value":"https://agent-as-daemonset-query:443"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: 30f3d1f2-6848-4333-b36f-c9775d5e2478
+    batch.kubernetes.io/job-name: check-span
+    controller-uid: 30f3d1f2-6848-4333-b36f-c9775d5e2478
+    job-name: check-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:parallelism: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"QUERY_HOST"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2023-12-11T08:12:28Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-12-11T08:20:49Z"
   name: check-span
   namespace: kuttl-test-central-snapper
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  parallelism: 1
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: 30f3d1f2-6848-4333-b36f-c9775d5e2478
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: 30f3d1f2-6848-4333-b36f-c9775d5e2478
+        batch.kubernetes.io/job-name: check-span
+        controller-uid: 30f3d1f2-6848-4333-b36f-c9775d5e2478
+        job-name: check-span
+    spec:
+      containers:
+      - command:
+        - ./query
+        env:
+        - name: SERVICE_NAME
+          value: smoke-test-service
+        - name: QUERY_HOST
+          value: https://agent-as-daemonset-query:443
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a
+        imagePullPolicy: IfNotPresent
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
 status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2023-12-11T08:12:28Z"
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-central-snapper/check-span: .status.succeeded: key is missing from map
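Note the timing: the Jobs were applied at 08:12:28-08:12:29 and the step was only declared failed at 08:22:29, i.e. kuttl kept re-polling the assert for the step's timeout window before giving up. A sketch of how that window is typically configured in a TestAssert file; the 600-second value is inferred from the observed ten-minute gap, not read from the suite:

apiVersion: kuttl.dev/v1beta1
kind: TestAssert
timeout: 600          # seconds kuttl keeps polling before marking the step failed
---
apiVersion: batch/v1
kind: Job
metadata:
  name: check-span
status:
  succeeded: 1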
logger.go:42: 08:22:29 | examples-agent-as-daemonset | examples-agent-as-daemonset events from ns kuttl-test-central-snapper:
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:28 +0000 UTC Normal Pod agent-as-daemonset-6df97bb844-j64rp Binding Scheduled Successfully assigned kuttl-test-central-snapper/agent-as-daemonset-6df97bb844-j64rp to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:28 +0000 UTC Warning Pod agent-as-daemonset-6df97bb844-j64rp FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-ui-oauth-proxy-tls" : secret "agent-as-daemonset-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:28 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-6df97bb844 SuccessfulCreate Created pod: agent-as-daemonset-6df97bb844-j64rp replicaset-controller
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:28 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:28 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-6df97bb844 to 1 deployment-controller
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:29 +0000 UTC Normal Pod agent-as-daemonset-6df97bb844-j64rp AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:29 +0000 UTC Normal Pod agent-as-daemonset-6df97bb844-j64rp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:29 +0000 UTC Normal Pod agent-as-daemonset-6df97bb844-j64rp.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:29 +0000 UTC Normal Pod agent-as-daemonset-6df97bb844-j64rp.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:29 +0000 UTC Normal Pod agent-as-daemonset-6df97bb844-j64rp.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:29 +0000 UTC Normal Pod agent-as-daemonset-6df97bb844-j64rp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:06:29 +0000 UTC Normal Pod agent-as-daemonset-6df97bb844-j64rp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:28 +0000 UTC Normal Pod report-span-cm6hx Binding Scheduled Successfully assigned kuttl-test-central-snapper/report-span-cm6hx to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:28 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-cm6hx job-controller
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Pod check-span-qqvk5 Binding Scheduled Successfully assigned kuttl-test-central-snapper/check-span-qqvk5 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Pod check-span-qqvk5 AddedInterface Add eth0 [10.128.2.74/23] from ovn-kubernetes
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Pod check-span-qqvk5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Pod check-span-qqvk5.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Pod check-span-qqvk5.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-qqvk5 job-controller
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Pod report-span-cm6hx AddedInterface Add eth0 [10.131.0.77/23] from ovn-kubernetes
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Pod report-span-cm6hx.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Pod report-span-cm6hx.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:12:29 +0000 UTC Normal Pod report-span-cm6hx.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | 2023-12-11 08:17:34 +0000 UTC Warning Pod check-span-qqvk5.spec.containers{asserts-container} BackOff Back-off restarting failed container asserts-container in pod check-span-qqvk5_kuttl-test-central-snapper(9912b112-25ef-4218-9fb7-5b6d3bbf5008) kubelet
logger.go:42: 08:22:29 | examples-agent-as-daemonset | Deleting namespace: kuttl-test-central-snapper
kuttl/harness/examples-service-types (60.04s) --- PASS: kuttl/harness/examples-with-sampling (57.49s) --- FAIL: kuttl/harness/examples-with-cassandra (101.97s) --- PASS: kuttl/harness/examples-with-badger-and-volume (259.14s) --- PASS: kuttl/harness/examples-with-badger (292.82s) --- PASS: kuttl/harness/examples-simplest (115.22s) --- PASS: kuttl/harness/examples-simple-prod-with-volumes (287.85s) --- FAIL: kuttl/harness/examples-simple-prod (269.81s) --- PASS: kuttl/harness/examples-business-application-injected-sidecar (131.85s) --- PASS: kuttl/harness/examples-openshift-with-htpasswd (29.10s) --- PASS: kuttl/harness/examples-openshift-agent-as-daemonset (197.40s) --- PASS: kuttl/harness/examples-collector-with-priority-class (36.91s) --- PASS: kuttl/harness/examples-all-in-one-with-options (60.41s) --- FAIL: kuttl/harness/examples-auto-provision-kafka (841.61s) --- FAIL: kuttl/harness/examples-agent-with-priority-class (147.47s) --- FAIL: kuttl/harness/examples-agent-as-daemonset (977.16s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml time="2023-12-11T08:26:13Z" level=debug msg="Setting a new name for the test suites" time="2023-12-11T08:26:13Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-12-11T08:26:13Z" level=debug msg="normalizing test case names" time="2023-12-11T08:26:13Z" level=debug msg="examples/artifacts -> examples_artifacts" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-with-badger-and-volume -> examples_examples_with_badger_and_volume" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-openshift-agent-as-daemonset -> examples_examples_openshift_agent_as_daemonset" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-collector-with-priority-class -> examples_examples_collector_with_priority_class" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-auto-provision-kafka -> examples_examples_auto_provision_kafka" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class" time="2023-12-11T08:26:13Z" level=debug msg="examples/examples-agent-as-daemonset -> 
examples_examples_agent_as_daemonset" +---------------------------------------------------------+--------+ | NAME | RESULT | +---------------------------------------------------------+--------+ | examples_artifacts | passed | | examples_examples_service_types | passed | | examples_examples_with_sampling | passed | | examples_examples_with_cassandra | failed | | examples_examples_with_badger_and_volume | passed | | examples_examples_with_badger | passed | | examples_examples_simplest | passed | | examples_examples_simple_prod_with_volumes | passed | | examples_examples_simple_prod | failed | | examples_examples_business_application_injected_sidecar | passed | | examples_examples_openshift_with_htpasswd | passed | | examples_examples_openshift_agent_as_daemonset | passed | | examples_examples_collector_with_priority_class | passed | | examples_examples_all_in_one_with_options | passed | | examples_examples_auto_provision_kafka | failed | | examples_examples_agent_with_priority_class | failed | | examples_examples_agent_as_daemonset | failed | +---------------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true + '[' 3 -ne 3 ']' + test_suite_name=generate + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/generate.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-generate make[2]: Entering directory '/tmp/jaeger-tests' test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.27.0/operator-sdk_`go env GOOS`_`go env GOARCH` ./hack/install/install-golangci-lint.sh Installing golangci-lint golangci-lint 1.53.2 is installed already ./hack/install/install-goimports.sh Installing goimports Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12 >>>> Formatting code... ./.ci/format.sh >>>> Building... ./hack/install/install-dependencies.sh Installing go dependencies Try 0... go mod download GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.51.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2023-12-11T08:36:41Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.51.0"" -o "bin/jaeger-operator" main.go JAEGER_VERSION="1.51.0" ./tests/e2e/generate/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 148m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 148m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/generate ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 2 -ne 2 ']' + test_name=generate + message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/generate/_build + '[' _build '!=' _build ']' + rm -rf generate + warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m' WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running generate E2E tests' Running generate E2E tests + cd tests/e2e/generate/_build + set +e + KUBECONFIG=/tmp/kubeconfig-282053367 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 1 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT kuttl/harness/artifacts
logger.go:42: 09:14:37 | artifacts | Creating namespace: kuttl-test-assured-pegasus
logger.go:42: 09:14:37 | artifacts | artifacts events from ns kuttl-test-assured-pegasus:
logger.go:42: 09:14:37 | artifacts | Deleting namespace: kuttl-test-assured-pegasus
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (5.85s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.69s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml
time="2023-12-11T09:14:45Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-11T09:14:45Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-11T09:14:45Z" level=debug msg="normalizing test case names"
time="2023-12-11T09:14:45Z" level=debug msg="generate/artifacts -> generate_artifacts"
+--------------------+--------+
|        NAME        | RESULT |
+--------------------+--------+
| generate_artifacts | passed |
+--------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=miscellaneous
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/miscellaneous.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-miscellaneous
make[2]: Entering directory '/tmp/jaeger-tests'
SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 149m Cluster version is 4.15.0-0.nightly-2023-12-09-012410'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 149m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/miscellaneous/render.sh ++ export SUITE_DIR=./tests/e2e/miscellaneous ++ SUITE_DIR=./tests/e2e/miscellaneous ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test cassandra-spark 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=cassandra-spark + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf cassandra-spark + warning 'cassandra-spark: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: cassandra-spark: Test not supported in OpenShift\e[0m' WAR: cassandra-spark: Test not supported in OpenShift + start_test collector-autoscale + '[' 1 -ne 1 ']' + test_name=collector-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m' Rendering files for test collector-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p collector-autoscale + cd collector-autoscale + jaeger_name=simple-prod + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + ELASTICSEARCH_NODECOUNT=1 + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml + kubectl api-versions + grep autoscaling/v2beta2 -q + rm ./04-assert.yaml + generate_otlp_e2e_tests http + test_protocol=http + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m' Rendering files for test collector-otlp-allinone-http + echo 
=========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale + '[' collector-autoscale '!=' _build ']' + cd .. + mkdir -p collector-otlp-allinone-http + cd collector-otlp-allinone-http + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger http true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m' Rendering files for test collector-otlp-production-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http + '[' collector-otlp-allinone-http '!=' _build ']' + cd .. 
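The 01-smoke-test.yaml rendered above wraps two jobs: report-span pushes spans at the collector's OTLP/HTTP port (4318) and check-span polls the secured query route (443) until the span shows up. The real payloads come from the repo's assert-jobs programs, but as a rough, hand-rolled sketch of the reporting half (the /v1/traces path and JSON shape are the standard OTLP/HTTP conventions; the trace and span IDs are made-up values):

  # post a single span to the same endpoint the rendered job targets
  OTLP=http://my-jaeger-collector-headless:4318
  NOW=$(date +%s%N)
  curl -s -X POST "$OTLP/v1/traces" -H 'Content-Type: application/json' -d '{
    "resourceSpans": [{
      "resource": {"attributes": [{"key": "service.name",
                                   "value": {"stringValue": "smoke-test-service"}}]},
      "scopeSpans": [{"spans": [{
        "traceId": "5b8efff798038103d269b633813fc60c",
        "spanId": "eee19b7ec3c1b174",
        "name": "smoke-span",
        "kind": 1,
        "startTimeUnixNano": "'"$NOW"'",
        "endTimeUnixNano": "'"$NOW"'"
      }]}]
    }]
  }'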
+ mkdir -p collector-otlp-production-http + cd collector-otlp-production-http + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger http true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + generate_otlp_e2e_tests grpc + test_protocol=grpc + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m' Rendering files for test collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http + '[' collector-otlp-production-http '!=' _build ']' + cd .. 
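Note how both production variants promote the deploy mode before a template is picked: jaeger_deploy_mode starts as production, and two [[ true = true ]] guards upgrade it to production_autoprovisioned, which selects the openshift/production-jaeger-autoprovisioned-install template so the operator provisions Elasticsearch itself. Condensed, with the guard variables named on the assumption that they are the OpenShift and SKIP_ES_EXTERNAL switches seen elsewhere in this run:

  jaeger_deploy_mode=production
  # both guards hold here, so the rendered CR asks the operator to
  # auto-provision an Elasticsearch instance instead of using an external one
  if [[ "$IS_OPENSHIFT" = true ]] && [[ "$SKIP_ES_EXTERNAL" = true ]]; then
      jaeger_deploy_mode=production_autoprovisioned
  fi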
+ mkdir -p collector-otlp-allinone-grpc + cd collector-otlp-allinone-grpc + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m' Rendering files for test collector-otlp-production-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc + '[' collector-otlp-allinone-grpc '!=' _build ']' + cd .. 
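The gRPC variants differ from the HTTP ones only on the reporting side: render_otlp_smoke_test keeps the query endpoint on the secured 443 route but flips the collector port, 4317 being the conventional OTLP/gRPC port and 4318 the OTLP/HTTP one. The selection traced above reduces to:

  # port selection as shown in the trace
  if [ "$reporting_protocol" = grpc ]; then
      reporting_port=:4317
  else
      reporting_port=:4318
  fi
  export OTEL_EXPORTER_OTLP_ENDPOINT="http://${JAEGER_NAME}-collector-headless${reporting_port}"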
+ mkdir -p collector-otlp-production-grpc + cd collector-otlp-production-grpc + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + '[' true = true ']' + skip_test istio 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=istio + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc + '[' collector-otlp-production-grpc '!=' _build ']' + cd .. 
+ rm -rf istio + warning 'istio: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m' WAR: istio: Test not supported in OpenShift + '[' true = true ']' + skip_test outside-cluster 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=outside-cluster + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf outside-cluster + warning 'outside-cluster: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m' WAR: outside-cluster: Test not supported in OpenShift + start_test set-custom-img + '[' 1 -ne 1 ']' + test_name=set-custom-img + echo =========================================================================== =========================================================================== + info 'Rendering files for test set-custom-img' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test set-custom-img\e[0m' Rendering files for test set-custom-img + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p set-custom-img + cd set-custom-img + jaeger_name=my-jaeger + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml + '[' true = true ']' + skip_test non-cluster-wide 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=non-cluster-wide + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img + '[' set-custom-img '!=' _build ']' + cd .. + rm -rf non-cluster-wide + warning 'non-cluster-wide: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m' WAR: non-cluster-wide: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running miscellaneous E2E tests' Running miscellaneous E2E tests + cd tests/e2e/miscellaneous/_build + set +e + KUBECONFIG=/tmp/kubeconfig-282053367 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
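Before following the run, it is worth noting what the set-custom-img case rendered above asserts: 02-install.yaml is a copy of the autoprovisioned install with only .spec.collector.image overridden, so the 3-check-image step can verify that the operator propagates the override into the collector Deployment. The rendered file itself is not shown in the log, but a minimal CR carrying the same override would look roughly like:

  cat > 02-install.yaml <<'EOF'
  apiVersion: jaegertracing.io/v1
  kind: Jaeger
  metadata:
    name: my-jaeger
  spec:
    strategy: production
    collector:
      image: test   # deliberately bogus; the test only checks propagation
  EOF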
harness.go:275: Successful connection to cluster at: https://api.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 7 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 09:14:58 | artifacts | Creating namespace: kuttl-test-probable-moray logger.go:42: 09:14:58 | artifacts | artifacts events from ns kuttl-test-probable-moray: logger.go:42: 09:14:58 | artifacts | Deleting namespace: kuttl-test-probable-moray === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 09:15:04 | collector-otlp-production-grpc | Creating namespace: kuttl-test-touching-pelican logger.go:42: 09:15:04 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 09:15:04 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-touching-pelican/my-jaeger created logger.go:42: 09:15:41 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 09:15:41 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 09:15:41 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 09:15:44 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
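The kubectl warning above is harmless and expected here: the Jaeger CR was created imperatively (kubectl create, without --save-config), so it lacks the last-applied-configuration annotation that kubectl apply needs for its three-way merge, and kubectl patches the annotation in on the fly. Recording the annotation up front avoids the warning entirely:

  kubectl create -f jaeger.yaml --save-config   # records last-applied-configuration
  kubectl apply -f jaeger.yaml                  # subsequent applies merge cleanly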
logger.go:42: 09:15:50 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 09:15:51 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 09:15:52 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 09:15:52 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 09:16:11 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 09:16:11 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-touching-pelican: logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:11 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4b6cd4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf replicaset-controller logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf Binding Scheduled Successfully assigned kuttl-test-touching-pelican/elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:11 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:11 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4b6cd4 to 1 deployment-controller logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:12 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:22 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:27 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesttouchingpelicanmyjaeger-1-56bc4x92jf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:37 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-6cb77f9b79 to 1 deployment-controller logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:37 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-8948cb8d6 to 1 deployment-controller logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-collector-6cb77f9b79-k5zrt Binding Scheduled Successfully assigned kuttl-test-touching-pelican/my-jaeger-collector-6cb77f9b79-k5zrt to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-collector-6cb77f9b79-k5zrt AddedInterface Add eth0 [10.131.0.84/23] from ovn-kubernetes logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-collector-6cb77f9b79-k5zrt.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-collector-6cb77f9b79-k5zrt.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-collector-6cb77f9b79-k5zrt.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-6cb77f9b79 SuccessfulCreate Created pod: my-jaeger-collector-6cb77f9b79-k5zrt replicaset-controller logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p Binding Scheduled Successfully assigned kuttl-test-touching-pelican/my-jaeger-query-8948cb8d6-g4k7p to ip-10-0-33-66.us-east-2.compute.internal default-scheduler 
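The early Unhealthy events above are part of the normal startup sequence rather than a failure: the Elasticsearch readiness probe polls the node before it accepts HTTP requests, so it reports "response code: 000" (no connection) until the cluster comes up, and only then does the operator scale up the collector and query deployments. One way to watch just those probe events while a test runs (namespace name taken from this run):

  kubectl get events -n kuttl-test-touching-pelican \
    --field-selector reason=Unhealthy --watch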
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p AddedInterface Add eth0 [10.131.0.83/23] from ovn-kubernetes logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:38 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-8948cb8d6 SuccessfulCreate Created pod: my-jaeger-query-8948cb8d6-g4k7p replicaset-controller logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:45 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:45 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:45 +0000 UTC Normal Pod my-jaeger-query-8948cb8d6-g4k7p.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:45 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-8948cb8d6 SuccessfulDelete Deleted pod: my-jaeger-query-8948cb8d6-g4k7p replicaset-controller logger.go:42: 09:16:11 | 
collector-otlp-production-grpc | 2023-12-11 09:15:45 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-8948cb8d6 to 0 from 1 deployment-controller logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv Binding Scheduled Successfully assigned kuttl-test-touching-pelican/my-jaeger-query-5f8bb65758-mq9mv to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv AddedInterface Add eth0 [10.131.0.85/23] from ovn-kubernetes logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5f8bb65758 SuccessfulCreate Created pod: my-jaeger-query-5f8bb65758-mq9mv replicaset-controller logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:46 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5f8bb65758 to 1 deployment-controller logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:47 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:47 +0000 UTC Normal Pod my-jaeger-query-5f8bb65758-mq9mv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Pod check-span-2xpjg Binding Scheduled Successfully 
assigned kuttl-test-touching-pelican/check-span-2xpjg to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Pod check-span-2xpjg AddedInterface Add eth0 [10.128.2.76/23] from ovn-kubernetes
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Pod check-span-2xpjg.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Pod check-span-2xpjg.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-2xpjg job-controller
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Pod report-span-p9xmm Binding Scheduled Successfully assigned kuttl-test-touching-pelican/report-span-p9xmm to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Pod report-span-p9xmm AddedInterface Add eth0 [10.128.2.75/23] from ovn-kubernetes
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Pod report-span-p9xmm.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Pod report-span-p9xmm.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Pod report-span-p9xmm.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:52 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-p9xmm job-controller
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:53 +0000 UTC Normal Pod check-span-2xpjg.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:53 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:53 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:15:53 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:16:11 | collector-otlp-production-grpc | 2023-12-11 09:16:11 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 09:16:11 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-touching-pelican
=== CONT kuttl/harness/set-custom-img
logger.go:42: 09:16:23 | set-custom-img | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:16:23 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
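kuttl treats only files matching that regexp as test steps: a numeric step index, a dash, a step name, and an optional .yaml suffix; anything else in the test directory, such as READMEs and helper scripts, is skipped and has to be invoked explicitly from a step. A quick illustration of which names qualify (the file list here is made up; the pattern is PCRE, so GNU grep needs -P):

    printf '%s\n' 1-install.yaml 3-check-image README.md check-collector-img.sh \
      | grep -P '^(\d+)-(?:[^\.]+)(?:\.yaml)?$'
    # prints only 1-install.yaml and 3-check-image
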
logger.go:42: 09:18:21 | set-custom-img | Creating namespace: kuttl-test-mature-kite
logger.go:42: 09:18:21 | set-custom-img/1-install | starting test step 1-install
logger.go:42: 09:18:21 | set-custom-img/1-install | Jaeger:kuttl-test-mature-kite/my-jaeger created
logger.go:42: 09:18:57 | set-custom-img/1-install | test step completed 1-install
logger.go:42: 09:18:57 | set-custom-img/2-install | starting test step 2-install
logger.go:42: 09:18:57 | set-custom-img/2-install | Jaeger:kuttl-test-mature-kite/my-jaeger updated
logger.go:42: 09:18:57 | set-custom-img/2-install | test step completed 2-install
logger.go:42: 09:18:57 | set-custom-img/3-check-image | starting test step 3-check-image
logger.go:42: 09:18:57 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh]
logger.go:42: 09:18:57 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9
logger.go:42: 09:19:02 | set-custom-img/3-check-image | Collector image asserted properly!
logger.go:42: 09:19:02 | set-custom-img/3-check-image | test step completed 3-check-image
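check-collector-img.sh itself is not reproduced in the log, but its visible behavior — report a mismatch while the old spec is still in place, retry, then assert success once the Deployment carries the new image — is consistent with a small polling loop. A hypothetical sketch, not the suite's actual script (deployment name, namespace variable, and retry budget are assumptions):

    # Hypothetical: poll the collector Deployment until its image matches step 2's value.
    expected="test"
    for _ in $(seq 1 30); do
        actual=$(kubectl get deployment my-jaeger-collector -n "$NAMESPACE" \
            -o jsonpath='{.spec.template.spec.containers[0].image}')
        if [ "$actual" = "$expected" ]; then
            echo "Collector image asserted properly!"
            exit 0
        fi
        echo "Collector image mismatch. Expected: $expected. Has: $actual"
        sleep 5
    done
    exit 1
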
logger.go:42: 09:19:03 | set-custom-img | set-custom-img events from ns kuttl-test-mature-kite:
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:27 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2 replicaset-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2 Binding Scheduled Successfully assigned kuttl-test-mature-kite/elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2 AddedInterface Add eth0 [10.129.2.61/23] from ovn-kubernetes
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:27 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmaturekitemyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4d to 1 deployment-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:43 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmaturekitemyjaeger-1-5678544b4dh2zd2.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal Pod my-jaeger-collector-5c4cdb4f89-jrlwf Binding Scheduled Successfully assigned kuttl-test-mature-kite/my-jaeger-collector-5c4cdb4f89-jrlwf to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal Pod my-jaeger-collector-5c4cdb4f89-jrlwf AddedInterface Add eth0 [10.131.0.86/23] from ovn-kubernetes
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal Pod my-jaeger-collector-5c4cdb4f89-jrlwf.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal Pod my-jaeger-collector-5c4cdb4f89-jrlwf.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5c4cdb4f89 SuccessfulCreate Created pod: my-jaeger-collector-5c4cdb4f89-jrlwf replicaset-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5c4cdb4f89 to 1 deployment-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw Binding Scheduled Successfully assigned kuttl-test-mature-kite/my-jaeger-query-7b54b4d486-ldnfw to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw AddedInterface Add eth0 [10.128.2.77/23] from ovn-kubernetes
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7b54b4d486 SuccessfulCreate Created pod: my-jaeger-query-7b54b4d486-ldnfw replicaset-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:54 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7b54b4d486 to 1 deployment-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:55 +0000 UTC Normal Pod my-jaeger-collector-5c4cdb4f89-jrlwf.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:55 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:55 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:55 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:55 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:55 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:55 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:55 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:55 +0000 UTC Normal Pod my-jaeger-query-7b54b4d486-ldnfw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:59 +0000 UTC Normal Pod my-jaeger-collector-57648bd9cf-wzb54 Binding Scheduled Successfully assigned kuttl-test-mature-kite/my-jaeger-collector-57648bd9cf-wzb54 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:59 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-57648bd9cf SuccessfulCreate Created pod: my-jaeger-collector-57648bd9cf-wzb54 replicaset-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:59 +0000 UTC Normal Pod my-jaeger-collector-5c4cdb4f89-jrlwf.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:59 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5c4cdb4f89 SuccessfulDelete Deleted pod: my-jaeger-collector-5c4cdb4f89-jrlwf replicaset-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:59 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-5c4cdb4f89 to 0 from 1 deployment-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:18:59 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-57648bd9cf to 1 deployment-controller
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:19:00 +0000 UTC Normal Pod my-jaeger-collector-57648bd9cf-wzb54 AddedInterface Add eth0 [10.131.0.87/23] from ovn-kubernetes
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:19:00 +0000 UTC Normal Pod my-jaeger-collector-57648bd9cf-wzb54.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:19:00 +0000 UTC Warning Pod my-jaeger-collector-57648bd9cf-wzb54.spec.containers{jaeger-collector} Failed Failed to pull image "test": reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:19:00 +0000 UTC Warning Pod my-jaeger-collector-57648bd9cf-wzb54.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:19:00 +0000 UTC Normal Pod my-jaeger-collector-57648bd9cf-wzb54.spec.containers{jaeger-collector} BackOff Back-off pulling image "test" kubelet
logger.go:42: 09:19:03 | set-custom-img | 2023-12-11 09:19:00 +0000 UTC Warning Pod my-jaeger-collector-57648bd9cf-wzb54.spec.containers{jaeger-collector} Failed Error: ImagePullBackOff kubelet
logger.go:42: 09:19:03 | set-custom-img | Deleting namespace: kuttl-test-mature-kite
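The ErrImagePull/ImagePullBackOff events just above are the point of the test, not a failure: step 2 sets the collector image to the deliberately unpullable reference "test" (which the runtime expands to docker.io/library/test:latest), and step 3 only asserts that the reference propagates to the Deployment spec, so the test passes while the new pod sits in back-off. To see the same pull failures on a live cluster, something like this (illustrative only) would work:

    kubectl get events -n "$NAMESPACE" \
        --field-selector involvedObject.kind=Pod,reason=Failed
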
=== CONT kuttl/harness/collector-otlp-production-http
logger.go:42: 09:19:09 | collector-otlp-production-http | Creating namespace: kuttl-test-quiet-marten
logger.go:42: 09:19:09 | collector-otlp-production-http/1-install | starting test step 1-install
logger.go:42: 09:19:09 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-quiet-marten/my-jaeger created
logger.go:42: 09:19:45 | collector-otlp-production-http/1-install | test step completed 1-install
logger.go:42: 09:19:45 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 09:19:45 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 09:19:47 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 09:19:53 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 09:19:54 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 09:19:54 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created
logger.go:42: 09:19:54 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created
logger.go:42: 09:20:06 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test
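The smoke test is generated rather than checked in: gomplate renders the shared otlp-smoke-test.yaml.template into a report-span Job (which presumably sends spans to the collector over OTLP) and a check-span Job (which presumably queries them back through JAEGER_QUERY_ENDPOINT), with everything parameterized through environment variables. Reformatted for readability, the two commands from the log are:

    REPORTING_PROTOCOL=http \
    ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a \
    OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 \
    JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 \
    MOUNT_SECRET=e2e-test \
        /tmp/jaeger-tests/bin/gomplate \
            -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template \
            -o otlp-smoke-test-job.yaml
    kubectl create -f otlp-smoke-test-job.yaml -n "$NAMESPACE"

Note the endpoint: 4318 is the OTLP/HTTP port, while the gRPC variants of this test later in the run set REPORTING_PROTOCOL=grpc and use 4317 instead.
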
logger.go:42: 09:20:06 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-quiet-marten:
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:15 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94b4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5 replicaset-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5 Binding Scheduled Successfully assigned kuttl-test-quiet-marten/elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5 AddedInterface Add eth0 [10.129.2.62/23] from ovn-kubernetes
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:15 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestquietmartenmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94b4 to 1 deployment-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:25 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:31 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestquietmartenmyjaeger-1-76545d94bk8gr5.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-collector-6998b7c585-k5bsk Binding Scheduled Successfully assigned kuttl-test-quiet-marten/my-jaeger-collector-6998b7c585-k5bsk to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Warning Pod my-jaeger-collector-6998b7c585-k5bsk FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-6998b7c585 SuccessfulCreate Created pod: my-jaeger-collector-6998b7c585-k5bsk replicaset-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-6998b7c585 to 1 deployment-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq Binding Scheduled Successfully assigned kuttl-test-quiet-marten/my-jaeger-query-d9ccdccd5-zwpwq to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq AddedInterface Add eth0 [10.131.0.88/23] from ovn-kubernetes
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-d9ccdccd5 SuccessfulCreate Created pod: my-jaeger-query-d9ccdccd5-zwpwq replicaset-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:42 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-d9ccdccd5 to 1 deployment-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:43 +0000 UTC Normal Pod my-jaeger-collector-6998b7c585-k5bsk AddedInterface Add eth0 [10.128.2.78/23] from ovn-kubernetes
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:43 +0000 UTC Normal Pod my-jaeger-collector-6998b7c585-k5bsk.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:43 +0000 UTC Normal Pod my-jaeger-collector-6998b7c585-k5bsk.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:43 +0000 UTC Normal Pod my-jaeger-collector-6998b7c585-k5bsk.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:43 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:49 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk Binding Scheduled Successfully assigned kuttl-test-quiet-marten/my-jaeger-query-74bdf7b765-zd7vk to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-74bdf7b765 SuccessfulCreate Created pod: my-jaeger-query-74bdf7b765-zd7vk replicaset-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:49 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:49 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:49 +0000 UTC Normal Pod my-jaeger-query-d9ccdccd5-zwpwq.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-d9ccdccd5 SuccessfulDelete Deleted pod: my-jaeger-query-d9ccdccd5-zwpwq replicaset-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:49 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-d9ccdccd5 to 0 from 1 deployment-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:49 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-74bdf7b765 to 1 deployment-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk AddedInterface Add eth0 [10.131.0.89/23] from ovn-kubernetes
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:50 +0000 UTC Normal Pod my-jaeger-query-74bdf7b765-zd7vk.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:54 +0000 UTC Normal Pod check-span-qgrp5 Binding Scheduled Successfully assigned kuttl-test-quiet-marten/check-span-qgrp5 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:54 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-qgrp5 job-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:54 +0000 UTC Normal Pod report-span-bqj72 Binding Scheduled Successfully assigned kuttl-test-quiet-marten/report-span-bqj72 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:54 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-bqj72 job-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:55 +0000 UTC Normal Pod check-span-qgrp5 AddedInterface Add eth0 [10.128.2.80/23] from ovn-kubernetes
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:55 +0000 UTC Normal Pod check-span-qgrp5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:55 +0000 UTC Normal Pod check-span-qgrp5.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:55 +0000 UTC Normal Pod check-span-qgrp5.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:55 +0000 UTC Normal Pod report-span-bqj72 AddedInterface Add eth0 [10.128.2.79/23] from ovn-kubernetes
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:55 +0000 UTC Normal Pod report-span-bqj72.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:55 +0000 UTC Normal Pod report-span-bqj72.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:55 +0000 UTC Normal Pod report-span-bqj72.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:19:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:20:06 | collector-otlp-production-http | 2023-12-11 09:20:06 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 09:20:06 | collector-otlp-production-http | Deleting namespace: kuttl-test-quiet-marten
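The single FailedMount warning above ("secret my-jaeger-collector-headless-tls not found") is most likely a startup race rather than a real failure: on OpenShift the serving-cert secret is minted asynchronously by the service CA, so the first volume mount can fire before the secret exists; kubelet retries and, as the subsequent events show, the collector starts normally. A hypothetical way to wait out that race by hand:

    # Illustrative only: poll until the serving-cert secret exists.
    until kubectl get secret my-jaeger-collector-headless-tls -n "$NAMESPACE" >/dev/null 2>&1; do
        sleep 2
    done
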
=== CONT kuttl/harness/collector-otlp-allinone-grpc
logger.go:42: 09:20:18 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-probable-perch
logger.go:42: 09:20:18 | collector-otlp-allinone-grpc/0-install | starting test step 0-install
logger.go:42: 09:20:18 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-probable-perch/my-jaeger created
logger.go:42: 09:20:25 | collector-otlp-allinone-grpc/0-install | test step completed 0-install
logger.go:42: 09:20:25 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 09:20:25 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 09:20:26 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 09:20:33 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 09:20:33 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 09:20:34 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created
logger.go:42: 09:20:34 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-probable-perch:
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:22 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb Binding Scheduled Successfully assigned kuttl-test-probable-perch/my-jaeger-64586f7c6f-j62vb to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:22 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb AddedInterface Add eth0 [10.129.2.63/23] from ovn-kubernetes
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:22 +0000 UTC Normal ReplicaSet.apps my-jaeger-64586f7c6f SuccessfulCreate Created pod: my-jaeger-64586f7c6f-j62vb replicaset-controller
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:22 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-64586f7c6f to 1 deployment-controller
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:23 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:23 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:23 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:23 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:23 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:23 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:28 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:28 +0000 UTC Normal Pod my-jaeger-64586f7c6f-j62vb.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-64586f7c6f SuccessfulDelete Deleted pod: my-jaeger-64586f7c6f-j62vb replicaset-controller
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:28 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-64586f7c6f to 0 from 1 deployment-controller
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal Pod my-jaeger-7cf94f766d-trdlq Binding Scheduled Successfully assigned kuttl-test-probable-perch/my-jaeger-7cf94f766d-trdlq to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal Pod my-jaeger-7cf94f766d-trdlq AddedInterface Add eth0 [10.129.2.64/23] from ovn-kubernetes
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal Pod my-jaeger-7cf94f766d-trdlq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal Pod my-jaeger-7cf94f766d-trdlq.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal Pod my-jaeger-7cf94f766d-trdlq.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal Pod my-jaeger-7cf94f766d-trdlq.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal Pod my-jaeger-7cf94f766d-trdlq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal Pod my-jaeger-7cf94f766d-trdlq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal ReplicaSet.apps my-jaeger-7cf94f766d SuccessfulCreate Created pod: my-jaeger-7cf94f766d-trdlq replicaset-controller
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:29 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7cf94f766d to 1 deployment-controller
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Pod check-span-9vrpw Binding Scheduled Successfully assigned kuttl-test-probable-perch/check-span-9vrpw to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Pod check-span-9vrpw AddedInterface Add eth0 [10.128.2.81/23] from ovn-kubernetes
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Pod check-span-9vrpw.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Pod check-span-9vrpw.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Pod check-span-9vrpw.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-9vrpw job-controller
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Pod report-span-zgcx5 Binding Scheduled Successfully assigned kuttl-test-probable-perch/report-span-zgcx5 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Pod report-span-zgcx5 AddedInterface Add eth0 [10.131.0.90/23] from ovn-kubernetes
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Pod report-span-zgcx5.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:34 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-zgcx5 job-controller
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:35 +0000 UTC Normal Pod report-span-zgcx5.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:35 +0000 UTC Normal Pod report-span-zgcx5.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | 2023-12-11 09:20:53 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 09:20:53 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-probable-perch
=== CONT kuttl/harness/collector-otlp-allinone-http
logger.go:42: 09:21:05 | collector-otlp-allinone-http | Creating namespace: kuttl-test-huge-dragon
logger.go:42: 09:21:05 | collector-otlp-allinone-http/0-install | starting test step 0-install
logger.go:42: 09:21:05 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-huge-dragon/my-jaeger created
logger.go:42: 09:21:11 | collector-otlp-allinone-http/0-install | test step completed 0-install
logger.go:42: 09:21:11 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 09:21:11 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 09:21:13 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 09:21:20 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 09:21:20 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 09:21:21 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created
logger.go:42: 09:21:21 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created
logger.go:42: 09:21:32 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 09:21:32 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-huge-dragon:
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:09 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd Binding Scheduled Successfully assigned kuttl-test-huge-dragon/my-jaeger-6fb574c787-nrxfd to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:09 +0000 UTC Warning Pod my-jaeger-6fb574c787-nrxfd FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:09 +0000 UTC Normal ReplicaSet.apps my-jaeger-6fb574c787 SuccessfulCreate Created pod: my-jaeger-6fb574c787-nrxfd replicaset-controller
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:09 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6fb574c787 to 1 deployment-controller
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:10 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd AddedInterface Add eth0 [10.129.2.65/23] from ovn-kubernetes
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:10 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:10 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:10 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:10 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:10 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:10 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:16 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:16 +0000 UTC Normal Pod my-jaeger-6fb574c787-nrxfd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:16 +0000 UTC Normal ReplicaSet.apps my-jaeger-6fb574c787 SuccessfulDelete Deleted pod: my-jaeger-6fb574c787-nrxfd replicaset-controller
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:16 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-6fb574c787 to 0 from 1 deployment-controller
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal Pod my-jaeger-7dc6fdff95-rd7gd Binding Scheduled Successfully assigned kuttl-test-huge-dragon/my-jaeger-7dc6fdff95-rd7gd to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal Pod my-jaeger-7dc6fdff95-rd7gd AddedInterface Add eth0 [10.129.2.66/23] from ovn-kubernetes
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal Pod my-jaeger-7dc6fdff95-rd7gd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal Pod my-jaeger-7dc6fdff95-rd7gd.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal Pod my-jaeger-7dc6fdff95-rd7gd.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal Pod my-jaeger-7dc6fdff95-rd7gd.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal Pod my-jaeger-7dc6fdff95-rd7gd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal Pod my-jaeger-7dc6fdff95-rd7gd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal ReplicaSet.apps my-jaeger-7dc6fdff95 SuccessfulCreate Created pod: my-jaeger-7dc6fdff95-rd7gd replicaset-controller
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:17 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7dc6fdff95 to 1 deployment-controller
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod check-span-xqlc8 Binding Scheduled Successfully assigned kuttl-test-huge-dragon/check-span-xqlc8 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod check-span-xqlc8 AddedInterface Add eth0 [10.128.2.82/23] from ovn-kubernetes
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod check-span-xqlc8.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod check-span-xqlc8.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod check-span-xqlc8.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-xqlc8 job-controller
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod report-span-dgllp Binding Scheduled Successfully assigned kuttl-test-huge-dragon/report-span-dgllp to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod report-span-dgllp AddedInterface Add eth0 [10.131.0.91/23] from ovn-kubernetes
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod report-span-dgllp.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod report-span-dgllp.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Pod report-span-dgllp.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:21 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-dgllp job-controller
logger.go:42: 09:21:32 | collector-otlp-allinone-http | 2023-12-11 09:21:32 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 09:21:32 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-huge-dragon
=== CONT kuttl/harness/collector-autoscale
logger.go:42: 09:21:44 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:21:44 | collector-autoscale | Ignoring wait-for-hpa.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:21:44 | collector-autoscale | Creating namespace: kuttl-test-prime-chamois
logger.go:42: 09:21:44 | collector-autoscale/1-install | starting test step 1-install
logger.go:42: 09:21:44 | collector-autoscale/1-install | Jaeger:kuttl-test-prime-chamois/simple-prod created
logger.go:42: 09:22:21 | collector-autoscale/1-install | test step completed 1-install
logger.go:42: 09:22:21 | collector-autoscale/2-wait-for-hpa | starting test step 2-wait-for-hpa
logger.go:42: 09:22:21 | collector-autoscale/2-wait-for-hpa | running command: [sh -c ./wait-for-hpa.sh]
logger.go:42: 09:22:21 | collector-autoscale/2-wait-for-hpa | Some HPA metrics are not known yet
logger.go:42: 09:22:22 | collector-autoscale/2-wait-for-hpa | test step completed 2-wait-for-hpa
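wait-for-hpa.sh is not reproduced in the log either, but its one visible message suggests a poll that blocks until the metrics server reports current utilization for the HPA — the same missing-metrics condition that produced the FailedGetResourceMetric warnings in the earlier tests. A hypothetical equivalent (retry budget and namespace variable are assumptions):

    # `kubectl get hpa` prints "<unknown>" in the TARGETS column until metrics arrive.
    for _ in $(seq 1 60); do
        if ! kubectl get hpa -n "$NAMESPACE" | grep -q '<unknown>'; then
            exit 0
        fi
        echo "Some HPA metrics are not known yet"
        sleep 5
    done
    exit 1
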
logger.go:42: 09:22:22 | collector-autoscale/3- | starting test step 3-
logger.go:42: 09:22:22 | collector-autoscale/3- | test step completed 3-
logger.go:42: 09:22:22 | collector-autoscale | collector-autoscale events from ns kuttl-test-prime-chamois:
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:51 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b7488674 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc replicaset-controller
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc Binding Scheduled Successfully assigned kuttl-test-prime-chamois/elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc AddedInterface Add eth0 [10.129.2.67/23] from ovn-kubernetes
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:51 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:51 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestprimechamoissimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b7488674 to 1 deployment-controller
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:21:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:07 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestprimechamoissimpleprod-1-64b748jqktc.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-collector-64c6f545f9-qcppt Binding Scheduled Successfully assigned kuttl-test-prime-chamois/simple-prod-collector-64c6f545f9-qcppt to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Warning Pod simple-prod-collector-64c6f545f9-qcppt FailedMount MountVolume.SetUp failed for volume "simple-prod-collector-tls-config-volume" : secret "simple-prod-collector-headless-tls" not found kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-64c6f545f9 SuccessfulCreate Created pod: simple-prod-collector-64c6f545f9-qcppt replicaset-controller
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-64c6f545f9 to 1 deployment-controller
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx Binding Scheduled Successfully assigned kuttl-test-prime-chamois/simple-prod-query-75bf6fc9cc-krtrx to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx AddedInterface Add eth0 [10.128.2.83/23] from ovn-kubernetes
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal ReplicaSet.apps simple-prod-query-75bf6fc9cc SuccessfulCreate Created pod: simple-prod-query-75bf6fc9cc-krtrx replicaset-controller
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:18 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-75bf6fc9cc to 1 deployment-controller
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:19 +0000 UTC Normal Pod simple-prod-collector-64c6f545f9-qcppt AddedInterface Add eth0 [10.131.0.92/23] from ovn-kubernetes
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:19 +0000 UTC Normal Pod simple-prod-collector-64c6f545f9-qcppt.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:19 +0000 UTC Normal Pod simple-prod-collector-64c6f545f9-qcppt.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:19 +0000 UTC Normal Pod simple-prod-collector-64c6f545f9-qcppt.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:19 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:22:22 | collector-autoscale | 2023-12-11 09:22:19 +0000 UTC Normal Pod simple-prod-query-75bf6fc9cc-krtrx.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:22:22 | collector-autoscale | Deleting namespace: kuttl-test-prime-chamois
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (450.68s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.69s)
--- PASS: kuttl/harness/collector-otlp-production-grpc (79.64s)
--- PASS: kuttl/harness/set-custom-img (165.51s)
--- PASS: kuttl/harness/collector-otlp-production-http (69.68s)
--- PASS: kuttl/harness/collector-otlp-allinone-grpc (46.76s)
--- PASS: kuttl/harness/collector-otlp-allinone-http
(38.58s)
--- PASS: kuttl/harness/collector-autoscale (44.65s)
PASS
+ exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml
time="2023-12-11T09:22:32Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-11T09:22:32Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-11T09:22:32Z" level=debug msg="normalizing test case names"
time="2023-12-11T09:22:32Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts"
time="2023-12-11T09:22:32Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc"
time="2023-12-11T09:22:32Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img"
time="2023-12-11T09:22:32Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http"
time="2023-12-11T09:22:32Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc"
time="2023-12-11T09:22:32Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http"
time="2023-12-11T09:22:32Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale"
+----------------------------------------------+--------+
|                     NAME                     | RESULT |
+----------------------------------------------+--------+
| miscellaneous_artifacts                      | passed |
| miscellaneous_collector_otlp_production_grpc | passed |
| miscellaneous_set_custom_img                 | passed |
| miscellaneous_collector_otlp_production_http | passed |
| miscellaneous_collector_otlp_allinone_grpc   | passed |
| miscellaneous_collector_otlp_allinone_http   | passed |
| miscellaneous_collector_autoscale            | passed |
+----------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true
+ '[' 3 -ne 3 ']' + test_suite_name=sidecar + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/sidecar.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-sidecar
make[2]: Entering directory '/tmp/jaeger-tests'
./tests/e2e/sidecar/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 156m Cluster version is 4.15.0-0.nightly-2023-12-09-012410'
++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 156m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/sidecar/render.sh ++ export SUITE_DIR=./tests/e2e/sidecar ++ SUITE_DIR=./tests/e2e/sidecar ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/sidecar ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + jaeger_service_name=order + start_test sidecar-deployment + '[' 1 -ne 1 ']' + test_name=sidecar-deployment + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-deployment' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m' Rendering files for test sidecar-deployment + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build + '[' _build '!=' _build ']' + mkdir -p sidecar-deployment + cd sidecar-deployment + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-namespace + '[' 1 -ne 1 ']' + test_name=sidecar-namespace + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-namespace' + '[' 1 -ne 1 ']' + 
echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m' Rendering files for test sidecar-namespace + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment + '[' sidecar-deployment '!=' _build ']' + cd .. + mkdir -p sidecar-namespace + cd sidecar-namespace + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-skip-webhook + '[' 1 -ne 1 ']' + test_name=sidecar-skip-webhook + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-skip-webhook' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m' Rendering files for test sidecar-skip-webhook + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace + '[' sidecar-namespace '!=' _build ']' + cd .. 
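
Each render_find_service call in the trace above follows the same export/render/unset pattern. A minimal sketch of one such step, assuming the template reads the exported variables through gomplate's Env datasource (the variable values and paths are taken from the trace; find-service.yaml.template itself is not shown in this log, so the comments on how it consumes them are assumptions):

export JAEGER_NAME=agent-as-sidecar    # Jaeger instance the generated query job targets
export JOB_NUMBER=00                   # ordinal used to name the find-service Job
export SERVICE_NAME=order              # service expected to be registered in Jaeger
export JAEGER_QUERY_ENDPOINT=http://${JAEGER_NAME}-query:16686
# gomplate substitutes the exported variables (e.g. {{ .Env.JAEGER_NAME }}) into the template
/tmp/jaeger-tests/bin/gomplate \
  -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template \
  -o ./03-find-service.yaml
# unset everything so the next rendered test starts from a clean environment
unset JAEGER_NAME SERVICE_NAME JOB_NUMBER JAEGER_COLLECTOR_ENDPOINT
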
+ mkdir -p sidecar-skip-webhook + cd sidecar-skip-webhook + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running sidecar E2E tests' Running sidecar E2E tests + cd tests/e2e/sidecar/_build + set +e + KUBECONFIG=/tmp/kubeconfig-282053367 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/sidecar-deployment === PAUSE kuttl/harness/sidecar-deployment === RUN kuttl/harness/sidecar-namespace === PAUSE kuttl/harness/sidecar-namespace === RUN kuttl/harness/sidecar-skip-webhook === PAUSE kuttl/harness/sidecar-skip-webhook === CONT kuttl/harness/artifacts logger.go:42: 09:22:40 | artifacts | Creating namespace: kuttl-test-sacred-raptor logger.go:42: 09:22:40 | artifacts | artifacts events from ns kuttl-test-sacred-raptor: logger.go:42: 09:22:40 | artifacts | Deleting namespace: kuttl-test-sacred-raptor === CONT kuttl/harness/sidecar-namespace logger.go:42: 09:22:46 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:22:47 | sidecar-namespace | Creating namespace: kuttl-test-integral-stallion logger.go:42: 09:22:47 | sidecar-namespace/0-install | starting test step 0-install logger.go:42: 09:22:47 | sidecar-namespace/0-install | Jaeger:kuttl-test-integral-stallion/agent-as-sidecar created logger.go:42: 09:22:52 | sidecar-namespace/0-install | test step completed 0-install logger.go:42: 09:22:52 | sidecar-namespace/1-install | starting test step 1-install logger.go:42: 09:22:53 | sidecar-namespace/1-install | Deployment:kuttl-test-integral-stallion/vertx-create-span-sidecar created logger.go:42: 09:22:55 | sidecar-namespace/1-install | test step completed 1-install logger.go:42: 09:22:55 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection logger.go:42: 09:22:55 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"] logger.go:42: 09:22:55 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-integral-stallion annotate logger.go:42: 09:23:00 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection logger.go:42: 09:23:00 | sidecar-namespace/3-find-service | starting test step 3-find-service logger.go:42: 09:23:00 | sidecar-namespace/3-find-service | Job:kuttl-test-integral-stallion/00-find-service created logger.go:42: 09:23:15 | sidecar-namespace/3-find-service | test step completed 3-find-service logger.go:42: 09:23:15 | sidecar-namespace/4-other-instance | starting test step 4-other-instance logger.go:42: 09:23:15 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-integral-stallion/agent-as-sidecar2 created logger.go:42: 09:23:24 | sidecar-namespace/4-other-instance | test 
step completed 4-other-instance logger.go:42: 09:23:24 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 09:23:24 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 09:23:24 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 09:23:24 | sidecar-namespace/6-find-service | Job:kuttl-test-integral-stallion/01-find-service created logger.go:42: 09:23:43 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 09:23:43 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 09:23:43 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 09:23:43 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-integral-stallion annotate logger.go:42: 09:23:45 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 09:23:45 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-integral-stallion: logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:51 +0000 UTC Normal Pod agent-as-sidecar-59c96db6fd-5rgcd Binding Scheduled Successfully assigned kuttl-test-integral-stallion/agent-as-sidecar-59c96db6fd-5rgcd to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:51 +0000 UTC Normal Pod agent-as-sidecar-59c96db6fd-5rgcd AddedInterface Add eth0 [10.129.2.68/23] from ovn-kubernetes logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:51 +0000 UTC Normal Pod agent-as-sidecar-59c96db6fd-5rgcd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:51 +0000 UTC Normal Pod agent-as-sidecar-59c96db6fd-5rgcd.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:51 +0000 UTC Normal Pod agent-as-sidecar-59c96db6fd-5rgcd.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:51 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-59c96db6fd SuccessfulCreate Created pod: agent-as-sidecar-59c96db6fd-5rgcd replicaset-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:51 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-59c96db6fd to 1 deployment-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:53 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-r649w Binding Scheduled Successfully assigned kuttl-test-integral-stallion/vertx-create-span-sidecar-84d458b68c-r649w to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:53 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-r649w AddedInterface Add eth0 [10.131.0.93/23] from ovn-kubernetes logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:53 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-r649w.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 
09:23:45 | sidecar-namespace | 2023-12-11 09:22:53 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-r649w.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:53 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-r649w.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:53 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-r649w replicaset-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:53 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:55 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv Binding Scheduled Successfully assigned kuttl-test-integral-stallion/vertx-create-span-sidecar-7989687b56-gwdzv to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:55 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv AddedInterface Add eth0 [10.128.2.84/23] from ovn-kubernetes logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:55 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:55 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7989687b56 SuccessfulCreate Created pod: vertx-create-span-sidecar-7989687b56-gwdzv replicaset-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:55 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7989687b56 to 1 deployment-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:59 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.71s (3.71s including waiting) kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:59 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:59 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:59 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:59 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:22:59 +0000 UTC Normal Pod 
vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:00 +0000 UTC Normal Pod 00-find-service-25lfd Binding Scheduled Successfully assigned kuttl-test-integral-stallion/00-find-service-25lfd to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:00 +0000 UTC Normal Pod 00-find-service-25lfd AddedInterface Add eth0 [10.129.2.69/23] from ovn-kubernetes logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:00 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-25lfd job-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:01 +0000 UTC Normal Pod 00-find-service-25lfd.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:01 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-r649w.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.93:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:01 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-r649w.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.93:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:03 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-r649w.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:03 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-r649w.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.93:8080/": read tcp 10.131.0.2:38818->10.131.0.93:8080: read: connection reset by peer kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:03 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-r649w.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.93:8080/": dial tcp 10.131.0.93:8080: connect: connection refused kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:04 +0000 UTC Normal Pod 00-find-service-25lfd.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" in 3.554s (3.554s including waiting) kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:04 +0000 UTC Normal Pod 00-find-service-25lfd.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:04 +0000 UTC Normal Pod 00-find-service-25lfd.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:07 +0000 UTC Warning Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.84:8080/": context 
deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:07 +0000 UTC Warning Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.84:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:09 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:10 +0000 UTC Warning Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.84:8080/": read tcp 10.128.2.2:45874->10.128.2.84:8080: read: connection reset by peer kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:10 +0000 UTC Warning Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.84:8080/": dial tcp 10.128.2.84:8080: connect: connection refused kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:10 +0000 UTC Normal Pod vertx-create-span-sidecar-7989687b56-gwdzv.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:14 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-r649w.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.93:8080/": read tcp 10.131.0.2:48194->10.131.0.93:8080: read: connection reset by peer kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:15 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:21 +0000 UTC Normal Pod agent-as-sidecar2-7896f9b9d6-ml6x6 Binding Scheduled Successfully assigned kuttl-test-integral-stallion/agent-as-sidecar2-7896f9b9d6-ml6x6 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:21 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-7896f9b9d6 SuccessfulCreate Created pod: agent-as-sidecar2-7896f9b9d6-ml6x6 replicaset-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:21 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-7896f9b9d6 to 1 deployment-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:22 +0000 UTC Normal Pod agent-as-sidecar2-7896f9b9d6-ml6x6 AddedInterface Add eth0 [10.129.2.70/23] from ovn-kubernetes logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:22 +0000 UTC Normal Pod agent-as-sidecar2-7896f9b9d6-ml6x6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:22 +0000 UTC Normal Pod agent-as-sidecar2-7896f9b9d6-ml6x6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:22 +0000 UTC Normal Pod 
agent-as-sidecar2-7896f9b9d6-ml6x6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:24 +0000 UTC Normal Pod 01-find-service-zwpwc Binding Scheduled Successfully assigned kuttl-test-integral-stallion/01-find-service-zwpwc to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:24 +0000 UTC Normal Pod 01-find-service-zwpwc AddedInterface Add eth0 [10.131.0.94/23] from ovn-kubernetes logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:24 +0000 UTC Normal Pod 01-find-service-zwpwc.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:24 +0000 UTC Normal Pod 01-find-service-zwpwc.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:24 +0000 UTC Normal Pod 01-find-service-zwpwc.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:24 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-zwpwc job-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:24 +0000 UTC Normal Pod agent-as-sidecar-59c96db6fd-5rgcd.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:27 +0000 UTC Normal Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2 Binding Scheduled Successfully assigned kuttl-test-integral-stallion/vertx-create-span-sidecar-6f49b4cdc8-lwhm2 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:27 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6f49b4cdc8 SuccessfulCreate Created pod: vertx-create-span-sidecar-6f49b4cdc8-lwhm2 replicaset-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:27 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-r649w replicaset-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:27 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:27 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6f49b4cdc8 to 1 from 0 deployment-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:28 +0000 UTC Normal Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2 AddedInterface Add eth0 [10.129.2.71/23] from ovn-kubernetes logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:28 +0000 UTC Normal Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:28 +0000 UTC Normal Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar 
kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:28 +0000 UTC Normal Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:28 +0000 UTC Normal Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:28 +0000 UTC Normal Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:28 +0000 UTC Normal Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:37 +0000 UTC Warning Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.71:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:37 +0000 UTC Warning Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.71:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:39 +0000 UTC Normal Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:39 +0000 UTC Warning Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.71:8080/": read tcp 10.129.2.2:55560->10.129.2.71:8080: read: connection reset by peer kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:39 +0000 UTC Warning Pod vertx-create-span-sidecar-6f49b4cdc8-lwhm2.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.71:8080/": dial tcp 10.129.2.71:8080: connect: connection refused kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:43 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:43 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7989687b56 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-7989687b56-gwdzv replicaset-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:43 +0000 UTC Normal Pod vertx-create-span-sidecar-7d98b48dd9-jr8nk Binding Scheduled Successfully assigned kuttl-test-integral-stallion/vertx-create-span-sidecar-7d98b48dd9-jr8nk to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:43 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7d98b48dd9 SuccessfulCreate Created pod: vertx-create-span-sidecar-7d98b48dd9-jr8nk replicaset-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:43 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar 
ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-7989687b56 to 0 from 1 deployment-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:43 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7d98b48dd9 to 1 from 0 deployment-controller logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:44 +0000 UTC Normal Pod vertx-create-span-sidecar-7d98b48dd9-jr8nk AddedInterface Add eth0 [10.131.0.95/23] from ovn-kubernetes logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:44 +0000 UTC Normal Pod vertx-create-span-sidecar-7d98b48dd9-jr8nk.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:44 +0000 UTC Normal Pod vertx-create-span-sidecar-7d98b48dd9-jr8nk.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:23:45 | sidecar-namespace | 2023-12-11 09:23:44 +0000 UTC Normal Pod vertx-create-span-sidecar-7d98b48dd9-jr8nk.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:23:46 | sidecar-namespace | Deleting namespace: kuttl-test-integral-stallion === CONT kuttl/harness/sidecar-skip-webhook logger.go:42: 09:24:09 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:24:10 | sidecar-skip-webhook | Creating namespace: kuttl-test-liberal-mosquito logger.go:42: 09:24:10 | sidecar-skip-webhook/0-install | starting test step 0-install logger.go:42: 09:24:10 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-liberal-mosquito/agent-as-sidecar created logger.go:42: 09:24:16 | sidecar-skip-webhook/0-install | test step completed 0-install logger.go:42: 09:24:16 | sidecar-skip-webhook/1-install | starting test step 1-install logger.go:42: 09:24:16 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-liberal-mosquito/vertx-create-span-sidecar created logger.go:42: 09:24:18 | sidecar-skip-webhook/1-install | test step completed 1-install logger.go:42: 09:24:18 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label logger.go:42: 09:24:18 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-liberal-mosquito] logger.go:42: 09:24:18 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled logger.go:42: 09:24:18 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-liberal-mosquito] logger.go:42: 09:24:19 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 09:24:19 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label logger.go:42: 09:24:19 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label logger.go:42: 09:24:19 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-liberal-mosquito] logger.go:42: 09:24:19 | 
sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled logger.go:42: 09:24:20 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label logger.go:42: 09:24:20 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-liberal-mosquito: logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:13 +0000 UTC Normal Pod agent-as-sidecar-5b969bf4d6-vhj5m Binding Scheduled Successfully assigned kuttl-test-liberal-mosquito/agent-as-sidecar-5b969bf4d6-vhj5m to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:13 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-5b969bf4d6 SuccessfulCreate Created pod: agent-as-sidecar-5b969bf4d6-vhj5m replicaset-controller logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:13 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-5b969bf4d6 to 1 deployment-controller logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:14 +0000 UTC Normal Pod agent-as-sidecar-5b969bf4d6-vhj5m AddedInterface Add eth0 [10.129.2.72/23] from ovn-kubernetes logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:14 +0000 UTC Normal Pod agent-as-sidecar-5b969bf4d6-vhj5m.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:14 +0000 UTC Normal Pod agent-as-sidecar-5b969bf4d6-vhj5m.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:14 +0000 UTC Normal Pod agent-as-sidecar-5b969bf4d6-vhj5m.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:16 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8hhp9 Binding Scheduled Successfully assigned kuttl-test-liberal-mosquito/vertx-create-span-sidecar-84d458b68c-8hhp9 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:16 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-8hhp9 replicaset-controller logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:16 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:17 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8hhp9 AddedInterface Add eth0 [10.131.0.96/23] from ovn-kubernetes logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:17 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8hhp9.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:17 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8hhp9.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:17 +0000 UTC Normal Pod 
vertx-create-span-sidecar-84d458b68c-8hhp9.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal Pod vertx-create-span-sidecar-7986f49c8d-cksk9 Binding Scheduled Successfully assigned kuttl-test-liberal-mosquito/vertx-create-span-sidecar-7986f49c8d-cksk9 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal Pod vertx-create-span-sidecar-7986f49c8d-cksk9 AddedInterface Add eth0 [10.128.2.85/23] from ovn-kubernetes logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal Pod vertx-create-span-sidecar-7986f49c8d-cksk9.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal Pod vertx-create-span-sidecar-7986f49c8d-cksk9.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal Pod vertx-create-span-sidecar-7986f49c8d-cksk9.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal Pod vertx-create-span-sidecar-7986f49c8d-cksk9.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal Pod vertx-create-span-sidecar-7986f49c8d-cksk9.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal Pod vertx-create-span-sidecar-7986f49c8d-cksk9.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7986f49c8d SuccessfulCreate Created pod: vertx-create-span-sidecar-7986f49c8d-cksk9 replicaset-controller logger.go:42: 09:24:20 | sidecar-skip-webhook | 2023-12-11 09:24:19 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7986f49c8d to 1 deployment-controller logger.go:42: 09:24:20 | sidecar-skip-webhook | Deleting namespace: kuttl-test-liberal-mosquito === CONT kuttl/harness/sidecar-deployment logger.go:42: 09:24:26 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:24:27 | sidecar-deployment | Creating namespace: kuttl-test-civil-tadpole logger.go:42: 09:24:27 | sidecar-deployment/0-install | starting test step 0-install logger.go:42: 09:24:27 | sidecar-deployment/0-install | Jaeger:kuttl-test-civil-tadpole/agent-as-sidecar created logger.go:42: 09:24:33 | sidecar-deployment/0-install | test step completed 0-install logger.go:42: 09:24:33 | sidecar-deployment/1-install | starting test step 1-install logger.go:42: 09:24:34 | sidecar-deployment/1-install | Deployment:kuttl-test-civil-tadpole/vertx-create-span-sidecar created logger.go:42: 09:24:36 | sidecar-deployment/1-install | test step 
completed 1-install logger.go:42: 09:24:36 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection logger.go:42: 09:24:36 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-civil-tadpole] logger.go:42: 09:24:36 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 09:24:37 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection logger.go:42: 09:24:37 | sidecar-deployment/3-find-service | starting test step 3-find-service logger.go:42: 09:24:37 | sidecar-deployment/3-find-service | Job:kuttl-test-civil-tadpole/00-find-service created logger.go:42: 09:24:48 | sidecar-deployment/3-find-service | test step completed 3-find-service logger.go:42: 09:24:48 | sidecar-deployment/4-other-instance | starting test step 4-other-instance logger.go:42: 09:24:48 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-civil-tadpole/agent-as-sidecar2 created logger.go:42: 09:24:53 | sidecar-deployment/4-other-instance | test step completed 4-other-instance logger.go:42: 09:24:53 | sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 09:24:54 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 09:24:54 | sidecar-deployment/6-find-service | starting test step 6-find-service logger.go:42: 09:24:54 | sidecar-deployment/6-find-service | Job:kuttl-test-civil-tadpole/01-find-service created logger.go:42: 09:25:13 | sidecar-deployment/6-find-service | test step completed 6-find-service logger.go:42: 09:25:13 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection logger.go:42: 09:25:13 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-civil-tadpole] logger.go:42: 09:25:13 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 09:25:14 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection logger.go:42: 09:25:14 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-civil-tadpole: logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:31 +0000 UTC Normal Pod agent-as-sidecar-697cbf46d8-5cblv Binding Scheduled Successfully assigned kuttl-test-civil-tadpole/agent-as-sidecar-697cbf46d8-5cblv to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:31 +0000 UTC Normal Pod agent-as-sidecar-697cbf46d8-5cblv AddedInterface Add eth0 [10.129.2.73/23] from ovn-kubernetes logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:31 +0000 UTC Normal Pod agent-as-sidecar-697cbf46d8-5cblv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:31 +0000 UTC Normal Pod agent-as-sidecar-697cbf46d8-5cblv.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:31 +0000 UTC Normal Pod agent-as-sidecar-697cbf46d8-5cblv.spec.containers{jaeger} Started Started container jaeger kubelet 
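
The 2-enable-injection and 7-disable-injection steps above toggle the sidecar purely through the sidecar.jaegertracing.io/inject annotation; the operator's webhook then rewrites the Deployment's pod template. A hedged sketch of verifying that by hand (the annotation, deployment name, and namespace come from the log; the jsonpath query and expected output are assumptions):

# annotate the deployment, as the test step does
kubectl annotate --overwrite deployment vertx-create-span-sidecar \
  sidecar.jaegertracing.io/inject=true --namespace kuttl-test-civil-tadpole
# once the operator reacts, the pod template should list an injected jaeger-agent container
kubectl get deployment vertx-create-span-sidecar --namespace kuttl-test-civil-tadpole \
  -o jsonpath='{.spec.template.spec.containers[*].name}'
# expected output (assumption): vertx-create-span-sidecar jaeger-agent
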
logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:31 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-697cbf46d8 SuccessfulCreate Created pod: agent-as-sidecar-697cbf46d8-5cblv replicaset-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:31 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-697cbf46d8 to 1 deployment-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:34 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-dcfq6 Binding Scheduled Successfully assigned kuttl-test-civil-tadpole/vertx-create-span-sidecar-84d458b68c-dcfq6 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:34 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-dcfq6 AddedInterface Add eth0 [10.131.0.97/23] from ovn-kubernetes logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:34 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-dcfq6.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:34 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-dcfq6.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:34 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-dcfq6.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:34 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-dcfq6 replicaset-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:34 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal Pod vertx-create-span-sidecar-56ffb6c94-8vdcw Binding Scheduled Successfully assigned kuttl-test-civil-tadpole/vertx-create-span-sidecar-56ffb6c94-8vdcw to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal Pod vertx-create-span-sidecar-56ffb6c94-8vdcw AddedInterface Add eth0 [10.128.2.86/23] from ovn-kubernetes logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{jaeger-agent} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-56ffb6c94 SuccessfulCreate Created pod: vertx-create-span-sidecar-56ffb6c94-8vdcw replicaset-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:36 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-56ffb6c94 to 1 deployment-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:37 +0000 UTC Normal Pod 00-find-service-dgs4w Binding Scheduled Successfully assigned kuttl-test-civil-tadpole/00-find-service-dgs4w to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:37 +0000 UTC Normal Pod 00-find-service-dgs4w AddedInterface Add eth0 [10.129.2.74/23] from ovn-kubernetes logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:37 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-dgs4w job-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:38 +0000 UTC Normal Pod 00-find-service-dgs4w.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:38 +0000 UTC Normal Pod 00-find-service-dgs4w.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:38 +0000 UTC Normal Pod 00-find-service-dgs4w.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:42 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-dcfq6.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.97:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:42 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-dcfq6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.97:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:44 +0000 UTC Warning Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.86:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:44 +0000 UTC Warning Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get 
"http://10.128.2.86:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:44 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-dcfq6.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:44 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-dcfq6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.97:8080/": read tcp 10.131.0.2:48670->10.131.0.97:8080: read: connection reset by peer kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:44 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-dcfq6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.97:8080/": dial tcp 10.131.0.97:8080: connect: connection refused kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:46 +0000 UTC Normal Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:47 +0000 UTC Warning Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.86:8080/": read tcp 10.128.2.2:40494->10.128.2.86:8080: read: connection reset by peer kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:47 +0000 UTC Warning Pod vertx-create-span-sidecar-56ffb6c94-8vdcw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.86:8080/": dial tcp 10.128.2.86:8080: connect: connection refused kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:48 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:52 +0000 UTC Normal Pod agent-as-sidecar2-755594d79b-7k6c8 Binding Scheduled Successfully assigned kuttl-test-civil-tadpole/agent-as-sidecar2-755594d79b-7k6c8 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:52 +0000 UTC Normal Pod agent-as-sidecar2-755594d79b-7k6c8 AddedInterface Add eth0 [10.129.2.75/23] from ovn-kubernetes logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:52 +0000 UTC Normal Pod agent-as-sidecar2-755594d79b-7k6c8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:52 +0000 UTC Normal Pod agent-as-sidecar2-755594d79b-7k6c8.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:52 +0000 UTC Normal Pod agent-as-sidecar2-755594d79b-7k6c8.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:52 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-755594d79b SuccessfulCreate Created pod: agent-as-sidecar2-755594d79b-7k6c8 replicaset-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:52 +0000 UTC Normal Deployment.apps agent-as-sidecar2 
ScalingReplicaSet Scaled up replica set agent-as-sidecar2-755594d79b to 1 deployment-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:54 +0000 UTC Normal Pod 01-find-service-qfm9n Binding Scheduled Successfully assigned kuttl-test-civil-tadpole/01-find-service-qfm9n to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:54 +0000 UTC Normal Pod 01-find-service-qfm9n AddedInterface Add eth0 [10.131.0.98/23] from ovn-kubernetes logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:54 +0000 UTC Normal Pod 01-find-service-qfm9n.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-2b8p0w0t/pipeline@sha256:c8da8ad4ce56c14877e144b7d4c3bb6e86f6ab5337c401cab89693830b07ae0a" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:54 +0000 UTC Normal Pod 01-find-service-qfm9n.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:54 +0000 UTC Normal Pod 01-find-service-qfm9n.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:54 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-qfm9n job-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:54 +0000 UTC Normal Pod agent-as-sidecar-697cbf46d8-5cblv.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:55 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-dcfq6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.97:8080/": read tcp 10.131.0.2:49718->10.131.0.97:8080: read: connection reset by peer kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:57 +0000 UTC Normal Pod vertx-create-span-sidecar-78c8cbb644-r6frn Binding Scheduled Successfully assigned kuttl-test-civil-tadpole/vertx-create-span-sidecar-78c8cbb644-r6frn to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:57 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-78c8cbb644 SuccessfulCreate Created pod: vertx-create-span-sidecar-78c8cbb644-r6frn replicaset-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:57 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-dcfq6 replicaset-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:57 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:57 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-78c8cbb644 to 1 from 0 deployment-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:58 +0000 UTC Normal Pod vertx-create-span-sidecar-78c8cbb644-r6frn AddedInterface Add eth0 [10.129.2.76/23] from ovn-kubernetes logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:58 +0000 UTC Normal Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{vertx-create-span-sidecar} Pulled 
Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:58 +0000 UTC Normal Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:58 +0000 UTC Normal Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:58 +0000 UTC Normal Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:58 +0000 UTC Normal Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:24:58 +0000 UTC Normal Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:07 +0000 UTC Warning Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.76:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:07 +0000 UTC Warning Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.76:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:09 +0000 UTC Normal Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:09 +0000 UTC Warning Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.76:8080/": read tcp 10.129.2.2:43274->10.129.2.76:8080: read: connection reset by peer kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:09 +0000 UTC Warning Pod vertx-create-span-sidecar-78c8cbb644-r6frn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.76:8080/": dial tcp 10.129.2.76:8080: connect: connection refused kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:13 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-56ffb6c94 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-56ffb6c94-8vdcw replicaset-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:13 +0000 UTC Normal Pod vertx-create-span-sidecar-7758d8d5ff-qwkqg Binding Scheduled Successfully assigned kuttl-test-civil-tadpole/vertx-create-span-sidecar-7758d8d5ff-qwkqg to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 
09:25:14 | sidecar-deployment | 2023-12-11 09:25:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7758d8d5ff SuccessfulCreate Created pod: vertx-create-span-sidecar-7758d8d5ff-qwkqg replicaset-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-56ffb6c94 to 0 from 1 deployment-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7758d8d5ff to 1 from 0 deployment-controller logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7758d8d5ff-qwkqg AddedInterface Add eth0 [10.131.0.99/23] from ovn-kubernetes logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7758d8d5ff-qwkqg.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7758d8d5ff-qwkqg.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:25:14 | sidecar-deployment | 2023-12-11 09:25:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7758d8d5ff-qwkqg.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:25:14 | sidecar-deployment | Deleting namespace: kuttl-test-civil-tadpole === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: ""
--- PASS: kuttl (345.28s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.84s)
        --- PASS: kuttl/harness/sidecar-namespace (66.18s)
        --- PASS: kuttl/harness/sidecar-skip-webhook (33.63s)
        --- PASS: kuttl/harness/sidecar-deployment (54.93s)
PASS
+ exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml time="2023-12-11T09:28:26Z" level=debug msg="Setting a new name for the test suites" time="2023-12-11T09:28:26Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-12-11T09:28:26Z" level=debug msg="normalizing test case names" time="2023-12-11T09:28:26Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts" time="2023-12-11T09:28:26Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace" time="2023-12-11T09:28:26Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook" time="2023-12-11T09:28:26Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment"
+------------------------------+--------+
|             NAME             | RESULT |
+------------------------------+--------+
| sidecar_artifacts            | passed |
| sidecar_sidecar_namespace    | passed |
| sidecar_sidecar_skip_webhook | passed |
| sidecar_sidecar_deployment   | passed |
+------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true + '[' 3 -ne 3 ']' + test_suite_name=streaming + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make
prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/streaming.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-streaming make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=3.6.0 \ SKIP_KAFKA=false \ SKIP_ES_EXTERNAL=true \ ./tests/e2e/streaming/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 162m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-09-012410 True False 162m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/streaming/render.sh ++ export SUITE_DIR=./tests/e2e/streaming ++ SUITE_DIR=./tests/e2e/streaming ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
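The version_le trace above (echo | tr | sort -V | head) is the one piece of logic in this render setup that is easy to misread: it decides KAFKA_USE_CUSTOM_PODSET by comparing the Kafka version against 0.25.0 using GNU sort's version ordering. A sketch of the helper as reconstructed from the expanded commands (the function body is inferred from the trace, not quoted from the repo):

    # version_le A B: succeed when A <= B in version order. sort -V puts
    # the smaller version first, so A <= B exactly when A is the first
    # line of the sorted pair.
    version_le() {
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }

Here version_le 3.6.0 0.25.0 is false (0.25.0 sorts first), and the very next trace line sets KAFKA_USE_CUSTOM_PODSET=true, i.e. custom pod sets are used whenever the version is above 0.25.0.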
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml + render_smoke_test simple-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=simple-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + export JAEGER_NAME=simple-streaming + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-tls + '[' 1 -ne 1 ']' + test_name=streaming-with-tls + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-tls' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m' Rendering files for test streaming-with-tls + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple + '[' streaming-simple '!=' _build ']' + cd .. 
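render_smoke_test, traced above, passes everything to its template through environment variables: JAEGER_QUERY_ENDPOINT, JAEGER_COLLECTOR_ENDPOINT and JAEGER_NAME are exported, gomplate renders the smoke-test manifest, and the variables are unset again. A hedged sketch of that contract; the template body below is hypothetical (only the variable names and values come from the trace) and assumes gomplate's env.Getenv function:

    # hypothetical one-line template, endpoint.txt.tmpl:
    #   {{ env.Getenv "JAEGER_QUERY_ENDPOINT" }}/api/traces
    export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443
    gomplate -f endpoint.txt.tmpl -o ./endpoint.txt
    cat ./endpoint.txt    # -> https://simple-streaming-query:443/api/traces
    unset JAEGER_QUERY_ENDPOINT

The unset at the end mirrors the trace and matters: render.sh renders several tests in one shell, so a leaked JAEGER_NAME would bleed into the next test's manifests.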
+ mkdir -p streaming-with-tls + cd streaming-with-tls + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + render_smoke_test tls-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=tls-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + export JAEGER_NAME=tls-streaming + JAEGER_NAME=tls-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. 
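Both render_install_elasticsearch traces above lean on yq's document splitting. A short sketch of what those two calls do; that elasticsearch_0.yml holds a StatefulSet and elasticsearch_1.yml a Service is confirmed further down, where the autoprovisioning test applies them separately:

    # Split the multi-document manifest into elasticsearch_0.yml,
    # elasticsearch_1.yml, ... (one file per YAML document; $index is
    # yq's document counter in split expressions).
    yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml

    # Point the StatefulSet at the service account that the test's first
    # step grants the privileged SCC:
    yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml

One detail in the autoscale rendering just below is worth flagging: the trace sets .spec.ingester.resources.requests.memory to "20Mi" and then immediately overwrites the same path with "500m". In Kubernetes quantity syntax "500m" means 0.5 of the base unit, i.e. half a byte when used as a memory request (and the Deployment rendered later in this log does show memory: 500m); the second call looks like it was meant for requests.cpu.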
+ mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./4-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + render_install_tracegen auto-provisioned 06 + '[' 2 -ne 2 ']' + jaeger=auto-provisioned + step=06 + replicas=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/tracegen.yaml -o ./06-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=1 ./06-install.yaml + sed -i s~simple-prod~auto-provisioned~gi ./06-install.yaml + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-tracegen.yaml.template -o ./06-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd 
tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-282053367 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-simple === PAUSE kuttl/harness/streaming-simple === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === RUN kuttl/harness/streaming-with-tls === PAUSE kuttl/harness/streaming-with-tls === CONT kuttl/harness/artifacts logger.go:42: 09:28:43 | artifacts | Creating namespace: kuttl-test-thankful-titmouse logger.go:42: 09:28:43 | artifacts | artifacts events from ns kuttl-test-thankful-titmouse: logger.go:42: 09:28:43 | artifacts | Deleting namespace: kuttl-test-thankful-titmouse === CONT kuttl/harness/streaming-with-tls logger.go:42: 09:28:48 | streaming-with-tls | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:28:48 | streaming-with-tls | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:28:48 | streaming-with-tls | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:28:49 | streaming-with-tls | Creating namespace: kuttl-test-capital-sculpin logger.go:42: 09:28:49 | streaming-with-tls/0-install | starting test step 0-install logger.go:42: 09:28:49 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 09:28:49 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 09:28:49 | streaming-with-tls/0-install | >>>> Skiping kafka-operator undeploy logger.go:42: 09:28:49 | streaming-with-tls/0-install | kubectl delete --namespace kuttl-test-capital-sculpin -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 09:28:49 | streaming-with-tls/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist logger.go:42: 09:28:49 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 09:28:49 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=3.6.0] logger.go:42: 09:28:49 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 09:28:49 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-capital-sculpin logger.go:42: 09:28:49 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-capital-sculpin 2>&1 | grep -v "already exists" || true logger.go:42: 09:28:49 | streaming-with-tls/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 09:28:49 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-capital-sculpin logger.go:42: 09:28:49 | streaming-with-tls/0-install | mkdir -p tests/_build/ logger.go:42: 09:28:49 | streaming-with-tls/0-install | kubectl create namespace 
kuttl-test-capital-sculpin 2>&1 | grep -v "already exists" || true logger.go:42: 09:28:49 | streaming-with-tls/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/3.6.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 09:28:51 | streaming-with-tls/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 09:28:51 | streaming-with-tls/0-install | Dload Upload Total Spent Left Speed logger.go:42: 09:28:51 | streaming-with-tls/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 14 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 logger.go:42: 09:28:51 | streaming-with-tls/0-install | curl: (22) The requested URL returned error: 404 logger.go:42: 09:28:51 | streaming-with-tls/0-install | make[2]: *** [Makefile:252: kafka] Error 22 logger.go:42: 09:28:51 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' case.go:364: failed in step 0-install case.go:366: exit status 2 logger.go:42: 09:28:51 | streaming-with-tls | streaming-with-tls events from ns kuttl-test-capital-sculpin: logger.go:42: 09:28:51 | streaming-with-tls | Deleting namespace: kuttl-test-capital-sculpin === CONT kuttl/harness/streaming-simple logger.go:42: 09:28:57 | streaming-simple | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:28:57 | streaming-simple | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:28:57 | streaming-simple | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:28:57 | streaming-simple | Creating namespace: kuttl-test-pretty-porpoise logger.go:42: 09:28:57 | streaming-simple/0-install | starting test step 0-install logger.go:42: 09:28:57 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 09:28:57 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 09:28:57 | streaming-simple/0-install | >>>> Skiping kafka-operator undeploy logger.go:42: 09:28:57 | streaming-simple/0-install | kubectl delete --namespace kuttl-test-pretty-porpoise -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 09:28:57 | streaming-simple/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist logger.go:42: 09:28:57 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 09:28:57 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=3.6.0] logger.go:42: 09:28:57 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 09:28:57 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-pretty-porpoise logger.go:42: 09:28:57 | streaming-simple/0-install | kubectl create namespace kuttl-test-pretty-porpoise 2>&1 | grep -v "already exists" || true logger.go:42: 09:28:58 | streaming-simple/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 09:28:58 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-pretty-porpoise logger.go:42: 09:28:58 | streaming-simple/0-install | mkdir -p tests/_build/ logger.go:42: 09:28:58 | streaming-simple/0-install | kubectl create namespace 
kuttl-test-pretty-porpoise 2>&1 | grep -v "already exists" || true logger.go:42: 09:28:58 | streaming-simple/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/3.6.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 09:28:58 | streaming-simple/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 09:28:58 | streaming-simple/0-install | Dload Upload Total Spent Left Speed logger.go:42: 09:28:58 | streaming-simple/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 14 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 logger.go:42: 09:28:58 | streaming-simple/0-install | curl: (22) The requested URL returned error: 404 logger.go:42: 09:28:58 | streaming-simple/0-install | make[2]: *** [Makefile:252: kafka] Error 22 logger.go:42: 09:28:58 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' case.go:364: failed in step 0-install case.go:366: exit status 2 logger.go:42: 09:28:58 | streaming-simple | streaming-simple events from ns kuttl-test-pretty-porpoise: logger.go:42: 09:28:58 | streaming-simple | Deleting namespace: kuttl-test-pretty-porpoise === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 09:29:03 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:29:03 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:29:03 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:29:04 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-merry-elephant logger.go:42: 09:29:04 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 09:29:04 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 09:29:04 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 09:29:04 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 09:29:04 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 09:29:04 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 09:29:10 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 09:29:10 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 09:29:10 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 09:29:13 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 09:29:14 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 09:29:31 | 
streaming-with-autoprovisioning-autoscale/1-install | test step completed 1-install logger.go:42: 09:29:31 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install logger.go:42: 09:29:32 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-merry-elephant/auto-provisioned created logger.go:42: 09:29:32 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install logger.go:42: 09:29:32 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3- logger.go:42: 09:30:09 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3- logger.go:42: 09:30:09 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4- logger.go:42: 09:30:45 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4- logger.go:42: 09:30:45 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5- logger.go:42: 09:31:08 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5- logger.go:42: 09:31:08 | streaming-with-autoprovisioning-autoscale/6-install | starting test step 6-install logger.go:42: 09:31:08 | streaming-with-autoprovisioning-autoscale/6-install | Deployment:kuttl-test-merry-elephant/tracegen created logger.go:42: 09:31:15 | streaming-with-autoprovisioning-autoscale/6-install | test step completed 6-install logger.go:42: 09:31:15 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7- logger.go:42: 09:41:15 | streaming-with-autoprovisioning-autoscale/7- | test step failed 7-
case.go:364: failed in step 7-
case.go:366: --- Deployment:kuttl-test-merry-elephant/auto-provisioned-ingester
+++ Deployment:kuttl-test-merry-elephant/auto-provisioned-ingester
@@ -1,8 +1,320 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
+  labels:
+    app: jaeger
+    app.kubernetes.io/component: ingester
+    app.kubernetes.io/instance: auto-provisioned
+    app.kubernetes.io/managed-by: jaeger-operator
+    app.kubernetes.io/name: auto-provisioned-ingester
+    app.kubernetes.io/part-of: jaeger
+  managedFields:
+  - apiVersion: apps/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:labels:
+          .: {}
+          f:app: {}
+          f:app.kubernetes.io/component: {}
+          f:app.kubernetes.io/instance: {}
+          f:app.kubernetes.io/managed-by: {}
+          f:app.kubernetes.io/name: {}
+          f:app.kubernetes.io/part-of: {}
+        f:ownerReferences:
+          .: {}
+          k:{"uid":"a281ae9c-5f0f-4415-836b-6bd0c32161c3"}: {}
+      f:spec:
+        f:progressDeadlineSeconds: {}
+        f:replicas: {}
+        f:revisionHistoryLimit: {}
+        f:selector: {}
+        f:strategy:
+          f:type: {}
+        f:template:
+          f:metadata:
+            f:annotations:
+              .: {}
+              f:linkerd.io/inject: {}
+              f:prometheus.io/port: {}
+              f:prometheus.io/scrape: {}
+              f:sidecar.istio.io/inject: {}
+            f:labels:
+              .: {}
+              f:app: {}
+              f:app.kubernetes.io/component: {}
+              f:app.kubernetes.io/instance: {}
+              f:app.kubernetes.io/managed-by: {}
+              f:app.kubernetes.io/name: {}
+              f:app.kubernetes.io/part-of: {}
+          f:spec:
+            f:containers:
+              k:{"name":"jaeger-ingester"}:
+                .: {}
+                f:args: {}
+                f:env:
+                  .: {}
+                  k:{"name":"SPAN_STORAGE_TYPE"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:livenessProbe:
+                  .: {}
+                  f:failureThreshold: {}
+                  f:httpGet:
+                    .: {}
+                    f:path: {}
+                    f:port: {}
+                    f:scheme: {}
+                  f:initialDelaySeconds: {}
+                  f:periodSeconds: {}
+                  f:successThreshold: {}
+                  f:timeoutSeconds: {}
+                f:name: {}
+                f:ports:
+                  .: {}
+                  k:{"containerPort":14270,"protocol":"TCP"}:
+                    .: {}
+                    f:containerPort: {}
+                    f:name: {}
+                    f:protocol: {}
+                f:readinessProbe:
+                  .: {}
+                  f:failureThreshold: {}
+                  f:httpGet:
+                    .: {}
+                    f:path: {}
+                    f:port: {}
+                    f:scheme: {}
+                  f:initialDelaySeconds: {}
+                  f:periodSeconds: {}
+                  f:successThreshold: {}
+                  f:timeoutSeconds: {}
+                f:resources:
+                  .: {}
+                  f:requests:
+                    .: {}
+                    f:memory: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+                    f:readOnly: {}
+                  k:{"mountPath":"/var/run/secrets/auto-provisioned"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+                  k:{"mountPath":"/var/run/secrets/auto-provisioned-cluster-ca"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:enableServiceLinks: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:serviceAccount: {}
+            f:serviceAccountName: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"auto-provisioned-trusted-ca"}:
+                .: {}
+                f:configMap:
+                  .: {}
+                  f:defaultMode: {}
+                  f:items: {}
+                  f:name: {}
+                f:name: {}
+              k:{"name":"kafkauser-auto-provisioned"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+              k:{"name":"kafkauser-auto-provisioned-cluster-ca"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: jaeger-operator
+    operation: Update
+    time: "2023-12-11T09:31:09Z"
+  - apiVersion: apps/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:deployment.kubernetes.io/revision: {}
+      f:status:
+        f:availableReplicas: {}
+        f:conditions:
+          .: {}
+          k:{"type":"Available"}:
+            .: {}
+            f:lastTransitionTime: {}
+            f:lastUpdateTime: {}
+            f:message: {}
+            f:reason: {}
+            f:status: {}
+            f:type: {}
+          k:{"type":"Progressing"}:
+            .: {}
+            f:lastTransitionTime: {}
+            f:lastUpdateTime: {}
+            f:message: {}
+            f:reason: {}
+            f:status: {}
+            f:type: {}
+        f:observedGeneration: {}
+        f:readyReplicas: {}
+        f:replicas: {}
+        f:updatedReplicas: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-12-11T09:31:13Z"
   name: auto-provisioned-ingester
   namespace: kuttl-test-merry-elephant
+  ownerReferences:
+  - apiVersion: jaegertracing.io/v1
+    controller: true
+    kind: Jaeger
+    name: auto-provisioned
+    uid: a281ae9c-5f0f-4415-836b-6bd0c32161c3
+spec:
+  progressDeadlineSeconds: 600
+  replicas: 1
+  revisionHistoryLimit: 10
+  selector:
+    matchLabels:
+      app: jaeger
+      app.kubernetes.io/component: ingester
+      app.kubernetes.io/instance: auto-provisioned
+      app.kubernetes.io/managed-by: jaeger-operator
+      app.kubernetes.io/name: auto-provisioned-ingester
+      app.kubernetes.io/part-of: jaeger
+  strategy:
+    type: Recreate
+  template:
+    metadata:
+      annotations:
+        linkerd.io/inject: disabled
+        prometheus.io/port: "14270"
+        prometheus.io/scrape: "true"
+        sidecar.istio.io/inject: "false"
+      creationTimestamp: null
+      labels:
+        app: jaeger
+        app.kubernetes.io/component: ingester
+        app.kubernetes.io/instance: auto-provisioned
+        app.kubernetes.io/managed-by: jaeger-operator
+        app.kubernetes.io/name: auto-provisioned-ingester
+        app.kubernetes.io/part-of: jaeger
+    spec:
+      containers:
+      - args:
+        - --es.server-urls=http://elasticsearch:9200
+        - --kafka.consumer.authentication=tls
+        - --kafka.consumer.brokers=auto-provisioned-kafka-bootstrap.kuttl-test-merry-elephant.svc.cluster.local:9093
+        - --kafka.consumer.tls.ca=/var/run/secrets/auto-provisioned-cluster-ca/ca.crt
+        - --kafka.consumer.tls.cert=/var/run/secrets/auto-provisioned/user.crt
+        - --kafka.consumer.tls.enabled=true
+        - --kafka.consumer.tls.key=/var/run/secrets/auto-provisioned/user.key
+        env:
+        - name: SPAN_STORAGE_TYPE
+          value: elasticsearch
+        image: registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:849018528225b7370cc4740fc9f94bef7ffd4195328a916a6013d88f885eebe2
+        imagePullPolicy: IfNotPresent
+        livenessProbe:
+          failureThreshold: 5
+          httpGet:
+            path: /
+            port: 14270
+            scheme: HTTP
+          initialDelaySeconds: 5
+          periodSeconds: 15
+          successThreshold: 1
+          timeoutSeconds: 1
+        name: jaeger-ingester
+        ports:
+        - containerPort: 14270
+          name: admin-http
+          protocol: TCP
+        readinessProbe:
+          failureThreshold: 3
+          httpGet:
+            path: /
+            port: 14270
+            scheme: HTTP
+          initialDelaySeconds: 1
+          periodSeconds: 10
+          successThreshold: 1
+          timeoutSeconds: 1
+        resources:
+          requests:
+            memory: 500m
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/auto-provisioned
+          name: kafkauser-auto-provisioned
+        - mountPath: /var/run/secrets/auto-provisioned-cluster-ca
+          name: kafkauser-auto-provisioned-cluster-ca
+        - mountPath: /etc/pki/ca-trust/extracted/pem
+          name: auto-provisioned-trusted-ca
+          readOnly: true
+      dnsPolicy: ClusterFirst
+      enableServiceLinks: false
+      restartPolicy: Always
+      schedulerName: default-scheduler
+      securityContext: {}
+      serviceAccount: auto-provisioned
+      serviceAccountName: auto-provisioned
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: kafkauser-auto-provisioned
+        secret:
+          defaultMode: 420
+          secretName: auto-provisioned
+      - name: kafkauser-auto-provisioned-cluster-ca
+        secret:
+          defaultMode: 420
+          secretName: auto-provisioned-cluster-ca-cert
+      - configMap:
+          defaultMode: 420
+          items:
+          - key: ca-bundle.crt
+            path: tls-ca-bundle.pem
+          name: auto-provisioned-trusted-ca
+        name: auto-provisioned-trusted-ca
 status:
-  readyReplicas: 2
+  availableReplicas: 1
+  conditions:
+  - lastTransitionTime: "2023-12-11T09:31:13Z"
+    lastUpdateTime: "2023-12-11T09:31:13Z"
+    message: Deployment has minimum availability.
+    reason: MinimumReplicasAvailable
+    status: "True"
+    type: Available
+  - lastTransitionTime: "2023-12-11T09:31:09Z"
+    lastUpdateTime: "2023-12-11T09:31:13Z"
+    message: ReplicaSet "auto-provisioned-ingester-c76cf4dcf" has successfully progressed.
+    reason: NewReplicaSetAvailable
+    status: "True"
+    type: Progressing
+  observedGeneration: 1
+  readyReplicas: 1
+  replicas: 1
+  updatedReplicas: 1
case.go:366: resource Deployment:kuttl-test-merry-elephant/auto-provisioned-ingester: .status.readyReplicas: value mismatch, expected: 2 != actual: 1
logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-merry-elephant: logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:10 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-merry-elephant/elasticsearch-0 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:10 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:11 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.129.2.77/23] from ovn-kubernetes logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:11 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:20 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 8.827s (8.827s including waiting) kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:20 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:20 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:26 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.129.2.77:9200/": dial tcp 10.129.2.77:9200: connect: connection refused kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:36 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:36 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:36 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-merry-elephant/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:36 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system
administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:43 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-eea09685-1b26-432f-882e-2dd942cdcc7b ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:44 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-merry-elephant/auto-provisioned-zookeeper-0 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:46 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-eea09685-1b26-432f-882e-2dd942cdcc7b" attachdetach-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:49 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.131.0.100/23] from ovn-kubernetes logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:49 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:49 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:29:49 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:10 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:11 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:11 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:11 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-merry-elephant/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:15 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-b27d7bd7-407f-4c2e-95bb-6bb7deb3d327 ebs.csi.aws.com_aws-ebs-csi-driver-controller-74b54944c8-99gpt_8201e0f8-9afa-4504-b698-cbc6cdf4803e logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:16 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-merry-elephant/auto-provisioned-kafka-0 to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:17 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b27d7bd7-407f-4c2e-95bb-6bb7deb3d327" attachdetach-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:25 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.128.2.87/23] from ovn-kubernetes logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:25 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:25 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:25 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv Binding Scheduled Successfully assigned kuttl-test-merry-elephant/auto-provisioned-entity-operator-86bbdd7d66-vvkvv to ip-10-0-29-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv AddedInterface Add eth0 [10.128.2.88/23] from ovn-kubernetes logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:f1be1aa2f18276f9169893eb55e3733cd52fa38f2101a9b3925f79774841689f" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Pod 
auto-provisioned-entity-operator-86bbdd7d66-vvkvv.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:f1be1aa2f18276f9169893eb55e3733cd52fa38f2101a9b3925f79774841689f" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-86bbdd7d66 SuccessfulCreate Created pod: auto-provisioned-entity-operator-86bbdd7d66-vvkvv replicaset-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:46 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-86bbdd7d66 to 1 deployment-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:47 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:30:47 +0000 UTC Normal Pod auto-provisioned-entity-operator-86bbdd7d66-vvkvv.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:08 +0000 UTC Normal Pod tracegen-56f88946cd-vdfgk Binding Scheduled Successfully assigned kuttl-test-merry-elephant/tracegen-56f88946cd-vdfgk to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:08 +0000 UTC Warning Pod tracegen-56f88946cd-vdfgk FailedMount MountVolume.SetUp failed for volume "auto-provisioned-trusted-ca" : configmap "auto-provisioned-trusted-ca" not found kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:08 +0000 UTC Warning Pod tracegen-56f88946cd-vdfgk FailedMount MountVolume.SetUp failed for volume "auto-provisioned-service-ca" : configmap "auto-provisioned-service-ca" not found kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:08 +0000 UTC Normal ReplicaSet.apps tracegen-56f88946cd SuccessfulCreate Created pod: tracegen-56f88946cd-vdfgk replicaset-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:08 +0000 UTC Normal Deployment.apps tracegen 
ScalingReplicaSet Scaled up replica set tracegen-56f88946cd to 1 deployment-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Normal Pod auto-provisioned-collector-869d89b9f7-sf5z2 Binding Scheduled Successfully assigned kuttl-test-merry-elephant/auto-provisioned-collector-869d89b9f7-sf5z2 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Warning Pod auto-provisioned-collector-869d89b9f7-sf5z2 FailedMount MountVolume.SetUp failed for volume "auto-provisioned-collector-tls-config-volume" : secret "auto-provisioned-collector-headless-tls" not found kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-869d89b9f7 SuccessfulCreate Created pod: auto-provisioned-collector-869d89b9f7-sf5z2 replicaset-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-869d89b9f7 to 1 deployment-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Normal Pod auto-provisioned-ingester-c76cf4dcf-q2fl2 Binding Scheduled Successfully assigned kuttl-test-merry-elephant/auto-provisioned-ingester-c76cf4dcf-q2fl2 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-c76cf4dcf SuccessfulCreate Created pod: auto-provisioned-ingester-c76cf4dcf-q2fl2 replicaset-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-c76cf4dcf to 1 deployment-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2 Binding Scheduled Successfully assigned kuttl-test-merry-elephant/auto-provisioned-query-768c57d4c9-tlfd2 to ip-10-0-33-66.us-east-2.compute.internal default-scheduler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-768c57d4c9 SuccessfulCreate Created pod: auto-provisioned-query-768c57d4c9-tlfd2 replicaset-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:09 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-768c57d4c9 to 1 deployment-controller logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-collector-869d89b9f7-sf5z2 AddedInterface Add eth0 [10.131.0.102/23] from ovn-kubernetes logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-collector-869d89b9f7-sf5z2.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 
+0000 UTC Normal Pod auto-provisioned-collector-869d89b9f7-sf5z2.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-collector-869d89b9f7-sf5z2.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-ingester-c76cf4dcf-q2fl2 AddedInterface Add eth0 [10.129.2.79/23] from ovn-kubernetes logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-ingester-c76cf4dcf-q2fl2.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:849018528225b7370cc4740fc9f94bef7ffd4195328a916a6013d88f885eebe2" kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2 AddedInterface Add eth0 [10.131.0.103/23] from ovn-kubernetes logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod auto-provisioned-query-768c57d4c9-tlfd2.spec.containers{jaeger-agent} Started Started 
container jaeger-agent kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod tracegen-56f88946cd-vdfgk AddedInterface Add eth0 [10.129.2.78/23] from ovn-kubernetes logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:10 +0000 UTC Normal Pod tracegen-56f88946cd-vdfgk.spec.containers{tracegen} Pulling Pulling image "jaegertracing/jaeger-tracegen:1.51.0" kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:12 +0000 UTC Normal Pod auto-provisioned-ingester-c76cf4dcf-q2fl2.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:849018528225b7370cc4740fc9f94bef7ffd4195328a916a6013d88f885eebe2" in 2.516s (2.516s including waiting) kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:12 +0000 UTC Normal Pod auto-provisioned-ingester-c76cf4dcf-q2fl2.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:12 +0000 UTC Normal Pod auto-provisioned-ingester-c76cf4dcf-q2fl2.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:13 +0000 UTC Normal Pod tracegen-56f88946cd-vdfgk.spec.containers{tracegen} Pulled Successfully pulled image "jaegertracing/jaeger-tracegen:1.51.0" in 2.895s (2.895s including waiting) kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:13 +0000 UTC Normal Pod tracegen-56f88946cd-vdfgk.spec.containers{tracegen} Created Created container tracegen kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:13 +0000 UTC Normal Pod tracegen-56f88946cd-vdfgk.spec.containers{tracegen} Started Started container tracegen kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:13 +0000 UTC Normal Pod tracegen-56f88946cd-vdfgk.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:13 +0000 UTC Normal Pod tracegen-56f88946cd-vdfgk.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:13 +0000 UTC Normal Pod tracegen-56f88946cd-vdfgk.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:24 +0000 UTC 
Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:31:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:32:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:32:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod auto-provisioned-collector-869d89b9f7-sf5z2 horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:32:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:32:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:32:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (1 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:36:25 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container 
jaeger-collector of Pod auto-provisioned-collector-869d89b9f7-sf5z2 horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | 2023-12-11 09:36:25 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-ingester of Pod auto-provisioned-ingester-c76cf4dcf-q2fl2 horizontal-pod-autoscaler logger.go:42: 09:41:16 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-merry-elephant === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- FAIL: kuttl (850.17s) --- FAIL: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.70s) --- FAIL: kuttl/harness/streaming-with-tls (8.80s) --- FAIL: kuttl/harness/streaming-simple (6.24s) --- FAIL: kuttl/harness/streaming-with-autoprovisioning-autoscale (744.60s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml time="2023-12-11T09:42:53Z" level=debug msg="Setting a new name for the test suites" time="2023-12-11T09:42:53Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-12-11T09:42:53Z" level=debug msg="normalizing test case names" time="2023-12-11T09:42:53Z" level=debug msg="streaming/artifacts -> streaming_artifacts" time="2023-12-11T09:42:53Z" level=debug msg="streaming/streaming-with-tls -> streaming_streaming_with_tls" time="2023-12-11T09:42:53Z" level=debug msg="streaming/streaming-simple -> streaming_streaming_simple" time="2023-12-11T09:42:53Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale" +-----------------------------------------------------+--------+ | NAME | RESULT | +-----------------------------------------------------+--------+ | streaming_artifacts | passed | | streaming_streaming_with_tls | failed | | streaming_streaming_simple | failed | | streaming_streaming_with_autoprovisioning_autoscale | failed | +-----------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true + '[' 3 -ne 3 ']' + test_suite_name=ui + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. 
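The streaming-with-autoprovisioning-autoscale failure above is internally consistent: kuttl asserts .status.readyReplicas: 2 on the auto-provisioned-ingester Deployment, while the HorizontalPodAutoscaler keeps emitting FailedGetResourceMetric with "missing request for cpu in container jaeger-ingester", so it can never compute a utilization ratio and never scales past one replica. HPA utilization targets are defined relative to container resource requests, so a minimal sketch of the kind of change that would unblock it, assuming the rendered Jaeger CR is named auto-provisioned (the rendered install template itself is not reproduced in this log, and this is not the suite's actual fix), looks like:

    # Hedged sketch: give the autoscaled containers CPU/memory requests so the
    # HPA can turn raw usage from the metrics API into a utilization percentage.
    kubectl patch jaeger auto-provisioned -n kuttl-test-merry-elephant --type=merge -p '
    spec:
      collector:
        resources:
          requests:
            cpu: 100m
            memory: 128Mi
      ingester:
        resources:
          requests:
            cpu: 100m
            memory: 128Mi
    '

With requests in place, the load generated by tracegen could plausibly drive the ingester to the asserted 2 ready replicas instead of burning the full 744.60s before the step times out.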
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=ui
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/ui.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-ui
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-12-09-012410 True False 177m Cluster version is 4.15.0-0.nightly-2023-12-09-012410'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-12-09-012410 True False 177m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/ui/render.sh
++ export SUITE_DIR=./tests/e2e/ui
++ SUITE_DIR=./tests/e2e/ui
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/ui
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ start_test allinone
+ '[' 1 -ne 1 ']'
+ test_name=allinone
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test allinone'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test allinone\e[0m'
Rendering files for test allinone
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/ui/_build
+ '[' _build '!=' _build ']'
+ mkdir -p allinone
+ cd allinone
+ export GET_URL_COMMAND
+ export URL
+ export JAEGER_NAME=all-in-one-ui
+ JAEGER_NAME=all-in-one-ui
+ '[' true = true ']'
+ GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml
+ ASSERT_PRESENT=true
+ TRACKING_ID=MyTrackingId
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml
+ start_test production
+ '[' 1 -ne 1 ']'
+ test_name=production
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test production'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test production\e[0m'
Rendering files for test production
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone
+ '[' allinone '!=' _build ']'
+ cd ..
+ mkdir -p production
+ cd production
+ export JAEGER_NAME=production-ui
+ JAEGER_NAME=production-ui
+ [[ true = true ]]
+ [[ true = true ]]
+ render_install_jaeger production-ui production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=production-ui
+ JAEGER_NAME=production-ui
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ '[' true = true ']'
+ INSECURE=true
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml
+ INSECURE=true
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml
+ ASSERT_PRESENT=false
+ TRACKING_ID=MyTrackingId
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml
+ ASSERT_PRESENT=true
+ TRACKING_ID=MyTrackingId
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
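The render trace above also shows how render.sh decides KAFKA_USE_CUSTOM_PODSET: it pipes the two versions through sort -V and checks which one sorts first. Since head -n 1 yields 0.25.0 rather than 3.6.0, the less-or-equal test fails and the script ends up with KAFKA_USE_CUSTOM_PODSET=true. A plausible reconstruction of that helper from the xtrace (the function body itself is not in the log):

    # version_le A B: true iff A <= B under GNU version sort, matching the
    # "echo | tr | sort -V | head -n 1" pipeline traced above.
    version_le() {
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }
    # Kafka 3.6.0 is newer than 0.25.0, so the custom pod set path is taken:
    version_le 3.6.0 0.25.0 || KAFKA_USE_CUSTOM_PODSET=true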
+ echo 'Running ui E2E tests'
Running ui E2E tests
+ cd tests/e2e/ui/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-282053367
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 600 seconds for each step
harness.go:372: testsuite: . has 3 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/allinone
=== PAUSE kuttl/harness/allinone
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/production
=== PAUSE kuttl/harness/production
=== CONT kuttl/harness/allinone
logger.go:42: 09:43:26 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:43:27 | allinone | Creating namespace: kuttl-test-desired-flea
logger.go:42: 09:43:27 | allinone/0-install | starting test step 0-install
logger.go:42: 09:43:27 | allinone/0-install | Jaeger:kuttl-test-desired-flea/all-in-one-ui created
logger.go:42: 09:43:31 | allinone/0-install | test step completed 0-install
logger.go:42: 09:43:31 | allinone/1-curl | starting test step 1-curl
logger.go:42: 09:43:31 | allinone/1-curl | running command: [./ensure-ingress-host.sh]
logger.go:42: 09:43:31 | allinone/1-curl | Checking the Ingress host value was populated
logger.go:42: 09:43:31 | allinone/1-curl | Try number 0
logger.go:42: 09:43:31 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-desired-flea.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 09:43:31 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui]
logger.go:42: 09:43:32 | allinone/1-curl | Checking an expected HTTP response
logger.go:42: 09:43:32 | allinone/1-curl | Running in OpenShift
logger.go:42: 09:43:32 | allinone/1-curl | User not provided. Getting the token...
logger.go:42: 09:43:33 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 09:43:40 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-desired-flea.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:43:41 | allinone/1-curl | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 09:43:41 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-desired-flea.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:43:41 | allinone/1-curl | HTTP response is 503. 200 expected. Waiting 10 s
logger.go:42: 09:43:51 | allinone/1-curl | Try number 3/30 the https://all-in-one-ui-kuttl-test-desired-flea.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:43:51 | allinone/1-curl | curl response asserted properly
logger.go:42: 09:43:51 | allinone/1-curl | test step completed 1-curl
logger.go:42: 09:43:51 | allinone/2-delete | starting test step 2-delete
logger.go:42: 09:43:51 | allinone/2-delete | Jaeger:kuttl-test-desired-flea/all-in-one-ui created
logger.go:42: 09:43:51 | allinone/2-delete | test step completed 2-delete
logger.go:42: 09:43:51 | allinone/3-install | starting test step 3-install
logger.go:42: 09:43:51 | allinone/3-install | Jaeger:kuttl-test-desired-flea/all-in-one-ui updated
logger.go:42: 09:43:51 | allinone/3-install | test step completed 3-install
logger.go:42: 09:43:51 | allinone/4-test-ui-config | starting test step 4-test-ui-config
logger.go:42: 09:43:51 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh]
logger.go:42: 09:43:51 | allinone/4-test-ui-config | Checking the Ingress host value was populated
logger.go:42: 09:43:51 | allinone/4-test-ui-config | Try number 0
logger.go:42: 09:43:51 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template:
logger.go:42: 09:43:51 | allinone/4-test-ui-config | template was:
logger.go:42: 09:43:51 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host}
logger.go:42: 09:43:51 | allinone/4-test-ui-config | object given to jsonpath engine was:
logger.go:42: 09:43:51 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}}
logger.go:42: 09:43:51 | allinone/4-test-ui-config |
logger.go:42: 09:43:51 | allinone/4-test-ui-config |
logger.go:42: 09:44:01 | allinone/4-test-ui-config | Try number 1
logger.go:42: 09:44:01 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-desired-flea.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 09:44:01 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 09:44:15 | allinone/4-test-ui-config | time="2023-12-11T09:44:15Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-desired-flea.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 09:44:15 | allinone/4-test-ui-config | time="2023-12-11T09:44:15Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 09:44:15 | allinone/4-test-ui-config | time="2023-12-11T09:44:15Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-desired-flea.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search"
logger.go:42: 09:44:15 | allinone/4-test-ui-config | time="2023-12-11T09:44:15Z" level=info msg="Doing request number 0"
logger.go:42: 09:44:15 | allinone/4-test-ui-config | time="2023-12-11T09:44:15Z" level=info msg="Content found and asserted!"
logger.go:42: 09:44:15 | allinone/4-test-ui-config | time="2023-12-11T09:44:15Z" level=info msg="Success!"
logger.go:42: 09:44:15 | allinone/4-test-ui-config | test step completed 4-test-ui-config
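The jsonpath error in 4-test-ui-config above is a benign race rather than a test bug: the step had just re-created the Jaeger instance, the Route list was momentarily empty, and {.items[0].status.ingress[0].host} indexed an empty array; the next try succeeded. The ensure-ingress-host.sh rendered by gomplate evidently loops for exactly this reason. A minimal sketch of such a guard, with variable names assumed rather than taken from the template:

    # Poll until the Route exists and reports an ingress host, instead of
    # trusting a single jsonpath lookup against a possibly-empty list.
    echo "Checking the Ingress host value was populated"
    for try in $(seq 0 29); do
      echo "Try number $try"
      INGRESS_HOST=$(kubectl get routes -n "$NAMESPACE" \
        -o=jsonpath='{.items[0].status.ingress[0].host}' 2>/dev/null) \
        && [ -n "$INGRESS_HOST" ] && break
      sleep 10
    done
    echo "Hostname is $INGRESS_HOST"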
logger.go:42: 09:44:15 | allinone | allinone events from ns kuttl-test-desired-flea:
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:31 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s Binding Scheduled Successfully assigned kuttl-test-desired-flea/all-in-one-ui-5665fdcbf6-mnz8s to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:31 +0000 UTC Warning Pod all-in-one-ui-5665fdcbf6-mnz8s FailedMount MountVolume.SetUp failed for volume "all-in-one-ui-collector-tls-config-volume" : secret "all-in-one-ui-collector-headless-tls" not found kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:31 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-5665fdcbf6 SuccessfulCreate Created pod: all-in-one-ui-5665fdcbf6-mnz8s replicaset-controller
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:31 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-5665fdcbf6 to 1 deployment-controller
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:32 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s AddedInterface Add eth0 [10.129.2.80/23] from ovn-kubernetes
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:32 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:32 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:32 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:32 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:32 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:32 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:35 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:35 +0000 UTC Normal Pod all-in-one-ui-5665fdcbf6-mnz8s.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:35 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-5665fdcbf6 SuccessfulDelete Deleted pod: all-in-one-ui-5665fdcbf6-mnz8s replicaset-controller
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:35 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-5665fdcbf6 to 0 from 1 deployment-controller
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:36 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn Binding Scheduled Successfully assigned kuttl-test-desired-flea/all-in-one-ui-6857f9647d-l7nsn to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:36 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-6857f9647d SuccessfulCreate Created pod: all-in-one-ui-6857f9647d-l7nsn replicaset-controller
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:36 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-6857f9647d to 1 deployment-controller
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:37 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn AddedInterface Add eth0 [10.129.2.81/23] from ovn-kubernetes
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:37 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:37 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:37 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:37 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:37 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:37 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:51 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:51 +0000 UTC Normal Pod all-in-one-ui-6857f9647d-l7nsn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:57 +0000 UTC Normal Pod all-in-one-ui-896969b85-cnq96 Binding Scheduled Successfully assigned kuttl-test-desired-flea/all-in-one-ui-896969b85-cnq96 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:57 +0000 UTC Normal Pod all-in-one-ui-896969b85-cnq96 AddedInterface Add eth0 [10.129.2.82/23] from ovn-kubernetes
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:57 +0000 UTC Normal Pod all-in-one-ui-896969b85-cnq96.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:57 +0000 UTC Normal Pod all-in-one-ui-896969b85-cnq96.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:57 +0000 UTC Normal Pod all-in-one-ui-896969b85-cnq96.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:57 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-896969b85 SuccessfulCreate Created pod: all-in-one-ui-896969b85-cnq96 replicaset-controller
logger.go:42: 09:44:15 | allinone | 2023-12-11 09:43:57 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-896969b85 to 1 deployment-controller
logger.go:42: 09:44:15 | allinone | Deleting namespace: kuttl-test-desired-flea
=== CONT kuttl/harness/production
logger.go:42: 09:44:21 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:44:21 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:44:22 | production | Creating namespace: kuttl-test-fond-reptile
logger.go:42: 09:44:22 | production/1-install | starting test step 1-install
logger.go:42: 09:44:22 | production/1-install | Jaeger:kuttl-test-fond-reptile/production-ui created
logger.go:42: 09:44:57 | production/1-install | test step completed 1-install
logger.go:42: 09:44:57 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access
logger.go:42: 09:44:57 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh]
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | Checking the Ingress host value was populated
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | Try number 0
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui]
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | Checking an expected HTTP response
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | Running in OpenShift
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | Not using any secret
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | curl response asserted properly
logger.go:42: 09:45:07 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access
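Steps such as 2-check-forbbiden-access and 5-check-disabled-security rely on assert-jaeger-http-code.sh polling rather than asserting once, which is why a transient 503 or 403 right after a redeploy does not fail the test. From the "Try number N/30", "Trying insecure mode", and "Waiting 10 s" messages, its control flow is plausibly close to this sketch (flag and variable names are assumptions, not copied from the script):

    # Poll the route up to 30 times for the expected HTTP status code,
    # downgrading to curl -k after a TLS/connection failure is observed.
    INSECURE_FLAG=""
    for try in $(seq 1 30); do
      echo "Try number $try/30 the $URL"
      code=$(curl $INSECURE_FLAG -s -o /dev/null -w '%{http_code}' "$URL")
      if [ "$code" = "$EXPECTED_CODE" ]; then
        echo "curl response asserted properly"
        exit 0
      elif [ "$code" = "000" ]; then
        echo "Something failed while trying to contact the server. Trying insecure mode"
        INSECURE_FLAG="-k"
      else
        echo "HTTP response is $code. $EXPECTED_CODE expected. Waiting 10 s"
        sleep 10
      fi
    done
    exit 1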
logger.go:42: 09:45:07 | production/3-curl | starting test step 3-curl
logger.go:42: 09:45:07 | production/3-curl | running command: [./ensure-ingress-host.sh]
logger.go:42: 09:45:07 | production/3-curl | Checking the Ingress host value was populated
logger.go:42: 09:45:07 | production/3-curl | Try number 0
logger.go:42: 09:45:07 | production/3-curl | Hostname is production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 09:45:07 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui]
logger.go:42: 09:45:08 | production/3-curl | Checking an expected HTTP response
logger.go:42: 09:45:08 | production/3-curl | Running in OpenShift
logger.go:42: 09:45:08 | production/3-curl | User not provided. Getting the token...
logger.go:42: 09:50:08 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 09:50:14 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:50:14 | production/3-curl | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 09:50:14 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:50:14 | production/3-curl | curl response asserted properly
logger.go:42: 09:50:14 | production/3-curl | test step completed 3-curl
logger.go:42: 09:50:14 | production/4-install | starting test step 4-install
logger.go:42: 09:50:14 | production/4-install | Jaeger:kuttl-test-fond-reptile/production-ui updated
logger.go:42: 09:50:14 | production/4-install | test step completed 4-install
logger.go:42: 09:50:14 | production/5-check-disabled-security | starting test step 5-check-disabled-security
logger.go:42: 09:50:14 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh]
logger.go:42: 09:50:14 | production/5-check-disabled-security | Checking the Ingress host value was populated
logger.go:42: 09:50:14 | production/5-check-disabled-security | Try number 0
logger.go:42: 09:50:15 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 09:50:15 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui]
logger.go:42: 09:50:15 | production/5-check-disabled-security | Checking an expected HTTP response
logger.go:42: 09:50:15 | production/5-check-disabled-security | Running in OpenShift
logger.go:42: 09:50:15 | production/5-check-disabled-security | Not using any secret
logger.go:42: 09:50:15 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:50:15 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 09:50:15 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:50:15 | production/5-check-disabled-security | HTTP response is 403. 200 expected. Waiting 10 s
logger.go:42: 09:50:25 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 09:50:25 | production/5-check-disabled-security | curl response asserted properly
logger.go:42: 09:50:25 | production/5-check-disabled-security | test step completed 5-check-disabled-security
logger.go:42: 09:50:25 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID
logger.go:42: 09:50:25 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh]
logger.go:42: 09:50:25 | production/6-check-NO-gaID | Checking the Ingress host value was populated
logger.go:42: 09:50:25 | production/6-check-NO-gaID | Try number 0
logger.go:42: 09:50:25 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 09:50:25 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 09:50:26 | production/6-check-NO-gaID | time="2023-12-11T09:50:26Z" level=info msg="Querying https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 09:50:26 | production/6-check-NO-gaID | time="2023-12-11T09:50:26Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 09:50:26 | production/6-check-NO-gaID | time="2023-12-11T09:50:26Z" level=info msg="Polling to https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search"
logger.go:42: 09:50:26 | production/6-check-NO-gaID | time="2023-12-11T09:50:26Z" level=info msg="Doing request number 0"
logger.go:42: 09:50:27 | production/6-check-NO-gaID | time="2023-12-11T09:50:27Z" level=info msg="Content not found and asserted it was not found!"
logger.go:42: 09:50:27 | production/6-check-NO-gaID | time="2023-12-11T09:50:27Z" level=info msg="Success!"
logger.go:42: 09:50:27 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID
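Both gaID checks drive the same uiconfig helper purely through environment variables: 6-check-NO-gaID asserts absence so that a leftover tracking ID from an earlier configuration would be caught, and 8-check-gaID flips the assertion after the CR is patched. The invocation, taken directly from the steps above, is:

    # ASSERT_PRESENT chooses between "must contain" and "must not contain"
    # EXPECTED_CONTENT in the UI configuration served at QUERY_HOSTNAME.
    ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId \
      QUERY_HOSTNAME="https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search" \
      go run ../../../../cmd-utils/uiconfig/main.go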
logger.go:42: 09:50:27 | production/7-add-tracking-id | starting test step 7-add-tracking-id
logger.go:42: 09:50:27 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE]
logger.go:42: 09:50:27 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured
logger.go:42: 09:50:27 | production/7-add-tracking-id | test step completed 7-add-tracking-id
logger.go:42: 09:50:27 | production/8-check-gaID | starting test step 8-check-gaID
logger.go:42: 09:50:27 | production/8-check-gaID | running command: [./ensure-ingress-host.sh]
logger.go:42: 09:50:27 | production/8-check-gaID | Checking the Ingress host value was populated
logger.go:42: 09:50:27 | production/8-check-gaID | Try number 0
logger.go:42: 09:50:28 | production/8-check-gaID | Hostname is production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 09:50:28 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 09:50:28 | production/8-check-gaID | time="2023-12-11T09:50:28Z" level=info msg="Querying https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 09:50:28 | production/8-check-gaID | time="2023-12-11T09:50:28Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 09:50:28 | production/8-check-gaID | time="2023-12-11T09:50:28Z" level=info msg="Polling to https://production-ui-kuttl-test-fond-reptile.apps.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com/search"
logger.go:42: 09:50:28 | production/8-check-gaID | time="2023-12-11T09:50:28Z" level=info msg="Doing request number 0"
logger.go:42: 09:50:28 | production/8-check-gaID | time="2023-12-11T09:50:28Z" level=warning msg="Found: false . Assert: true"
logger.go:42: 09:50:28 | production/8-check-gaID | time="2023-12-11T09:50:28Z" level=warning msg="The condition of the test function was not accomplished"
logger.go:42: 09:50:28 | production/8-check-gaID | time="2023-12-11T09:50:28Z" level=info msg="Doing request number 1"
logger.go:42: 09:50:28 | production/8-check-gaID | time="2023-12-11T09:50:28Z" level=warning msg="Found: false . Assert: true"
logger.go:42: 09:50:28 | production/8-check-gaID | time="2023-12-11T09:50:28Z" level=warning msg="The condition of the test function was not accomplished"
logger.go:42: 09:50:36 | production/8-check-gaID | time="2023-12-11T09:50:36Z" level=info msg="Doing request number 2"
logger.go:42: 09:50:36 | production/8-check-gaID | time="2023-12-11T09:50:36Z" level=info msg="Content found and asserted!"
logger.go:42: 09:50:36 | production/8-check-gaID | time="2023-12-11T09:50:36Z" level=info msg="Success!"
logger.go:42: 09:50:36 | production/8-check-gaID | test step completed 8-check-gaID
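8-check-gaID needed three polls because kubectl apply only updates the Jaeger CR; the operator still has to roll the production-ui-query Deployment before the UI serves the new configuration, so the first requests legitimately report "Found: false". The applied add-tracking-id.yaml is not reproduced in this log; a hypothetical stand-in, based on the Jaeger UI's tracking.gaID option and the MyTrackingId value rendered earlier, would be:

    # Hypothetical equivalent of add-tracking-id.yaml (actual file contents unknown):
    kubectl apply -n "$NAMESPACE" -f - <<'EOF'
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: production-ui
    spec:
      ui:
        options:
          tracking:
            gaID: MyTrackingId
    EOF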
logger.go:42: 09:50:36 | production | production events from ns kuttl-test-fond-reptile:
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:28 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648f6698 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24 replicaset-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24 Binding Scheduled Successfully assigned kuttl-test-fond-reptile/elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24 to ip-10-0-102-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24 AddedInterface Add eth0 [10.129.2.83/23] from ovn-kubernetes
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:28 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestfondreptileproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648f6698 to 1 deployment-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:38 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:44 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfondreptileproductionui-1-55648ljn24.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal Pod production-ui-collector-7545b989db-krq6n Binding Scheduled Successfully assigned kuttl-test-fond-reptile/production-ui-collector-7545b989db-krq6n to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal Pod production-ui-collector-7545b989db-krq6n AddedInterface Add eth0 [10.131.0.104/23] from ovn-kubernetes
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal Pod production-ui-collector-7545b989db-krq6n.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal Pod production-ui-collector-7545b989db-krq6n.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal Pod production-ui-collector-7545b989db-krq6n.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal ReplicaSet.apps production-ui-collector-7545b989db SuccessfulCreate Created pod: production-ui-collector-7545b989db-krq6n replicaset-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-7545b989db to 1 deployment-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l Binding Scheduled Successfully assigned kuttl-test-fond-reptile/production-ui-query-7d568684bd-dlq5l to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Warning Pod production-ui-query-7d568684bd-dlq5l FailedMount MountVolume.SetUp failed for volume "production-ui-ui-oauth-proxy-tls" : secret "production-ui-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal ReplicaSet.apps production-ui-query-7d568684bd SuccessfulCreate Created pod: production-ui-query-7d568684bd-dlq5l replicaset-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:55 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-7d568684bd to 1 deployment-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l AddedInterface Add eth0 [10.128.2.89/23] from ovn-kubernetes
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:44:56 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:45:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:50:36 | production | 2023-12-11 09:45:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:50:36 | production | 2023-12-11 09:45:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:50:36 | production | 2023-12-11 09:45:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 09:50:36 | production | 2023-12-11 09:45:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod production-ui-collector-7545b989db-krq6n horizontal-pod-autoscaler
logger.go:42: 09:50:36 | production | 2023-12-11 09:45:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:09 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:09 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:09 +0000 UTC Normal Pod production-ui-query-7d568684bd-dlq5l.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:09 +0000 UTC Normal ReplicaSet.apps production-ui-query-7d568684bd SuccessfulDelete Deleted pod: production-ui-query-7d568684bd-dlq5l replicaset-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:09 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-7d568684bd to 0 from 1 deployment-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz Binding Scheduled Successfully assigned kuttl-test-fond-reptile/production-ui-query-6cdf6884f9-xcdjz to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz AddedInterface Add eth0 [10.128.2.90/23] from ovn-kubernetes
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal ReplicaSet.apps production-ui-query-6cdf6884f9 SuccessfulCreate Created pod: production-ui-query-6cdf6884f9-xcdjz replicaset-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:10 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-6cdf6884f9 to 1 deployment-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod production-ui-collector-7545b989db-krq6n horizontal-pod-autoscaler
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:18 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:18 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:18 +0000 UTC Normal Pod production-ui-query-6cdf6884f9-xcdjz.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:18 +0000 UTC Normal ReplicaSet.apps production-ui-query-6cdf6884f9 SuccessfulDelete Deleted pod: production-ui-query-6cdf6884f9-xcdjz replicaset-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:18 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-6cdf6884f9 to 0 from 1 deployment-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth Binding Scheduled Successfully assigned kuttl-test-fond-reptile/production-ui-query-64f5f4848c-t8rth to ip-10-0-29-148.us-east-2.compute.internal default-scheduler
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth AddedInterface Add eth0 [10.128.2.91/23] from ovn-kubernetes
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal ReplicaSet.apps production-ui-query-64f5f4848c SuccessfulCreate Created pod: production-ui-query-64f5f4848c-t8rth replicaset-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:20 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-64f5f4848c to 1 deployment-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:28 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:28 +0000 UTC Normal Pod production-ui-query-64f5f4848c-t8rth.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:28 +0000 UTC Normal ReplicaSet.apps production-ui-query-64f5f4848c SuccessfulDelete Deleted pod: production-ui-query-64f5f4848c-t8rth replicaset-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:28 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-64f5f4848c to 0 from 1 deployment-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:29 +0000 UTC Normal Pod production-ui-query-7b7fb6f47b-bddtc Binding Scheduled Successfully assigned kuttl-test-fond-reptile/production-ui-query-7b7fb6f47b-bddtc to ip-10-0-33-66.us-east-2.compute.internal default-scheduler
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:29 +0000 UTC Normal Pod production-ui-query-7b7fb6f47b-bddtc AddedInterface Add eth0 [10.131.0.106/23] from ovn-kubernetes
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:29 +0000 UTC Normal Pod production-ui-query-7b7fb6f47b-bddtc.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:29 +0000 UTC Normal Pod production-ui-query-7b7fb6f47b-bddtc.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:29 +0000 UTC Normal Pod production-ui-query-7b7fb6f47b-bddtc.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:29 +0000 UTC Normal Pod production-ui-query-7b7fb6f47b-bddtc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:29 +0000 UTC Normal ReplicaSet.apps production-ui-query-7b7fb6f47b SuccessfulCreate Created pod: production-ui-query-7b7fb6f47b-bddtc replicaset-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:29 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-7b7fb6f47b to 1 deployment-controller
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:30 +0000 UTC Normal Pod production-ui-query-7b7fb6f47b-bddtc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | 2023-12-11 09:50:30 +0000 UTC Normal Pod production-ui-query-7b7fb6f47b-bddtc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:50:36 | production | Deleting namespace: kuttl-test-fond-reptile
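kuttl prints the namespace's event stream when a test ends, which is what makes the query-deployment churn above visible. The same view can be reproduced by hand; the command below is a manual equivalent, a sketch rather than what the harness runs internally. The HorizontalPodAutoscaler warnings are consistent with a collector container that has no CPU/memory requests set, as the "missing request for cpu/memory" messages state.

# Manual equivalent of kuttl's end-of-test event dump (assumption: the
# harness collects events via the Kubernetes API, not this CLI call).
kubectl get events -n kuttl-test-fond-reptile --sort-by=.lastTimestamp \
  -o custom-columns='TIME:.lastTimestamp,TYPE:.type,OBJECT:.involvedObject.name,REASON:.reason,MESSAGE:.message'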
=== CONT  kuttl/harness/artifacts
logger.go:42: 09:50:43 | artifacts | Creating namespace: kuttl-test-robust-shiner
logger.go:42: 09:50:43 | artifacts | artifacts events from ns kuttl-test-robust-shiner:
logger.go:42: 09:50:43 | artifacts | Deleting namespace: kuttl-test-robust-shiner
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (442.27s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/allinone (54.94s)
        --- PASS: kuttl/harness/production (381.45s)
        --- PASS: kuttl/harness/artifacts (5.71s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml
time="2023-12-11T09:50:49Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-11T09:50:49Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-11T09:50:49Z" level=debug msg="normalizing test case names"
time="2023-12-11T09:50:49Z" level=debug msg="ui/allinone -> ui_allinone"
time="2023-12-11T09:50:49Z" level=debug msg="ui/production -> ui_production"
time="2023-12-11T09:50:49Z" level=debug msg="ui/artifacts -> ui_artifacts"
+---------------+--------+
|     NAME      | RESULT |
+---------------+--------+
| ui_allinone   | passed |
| ui_production | passed |
| ui_artifacts  | passed |
+---------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
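After each suite, junitcli rewrites the kuttl XML report: it renames the suite, drops the synthetic 'artifacts' test case, and normalizes test-case names (ui/allinone -> ui_allinone). A CI job could additionally gate on the rewritten report; the check below is a hypothetical add-on, not part of junitcli or this pipeline:

# Hypothetical gate (an assumption, not in this pipeline): fail fast if the
# normalized JUnit report records any failures or errors in its attributes.
report=/logs/artifacts/ui.xml
if grep -Eq 'failures="[1-9][0-9]*"|errors="[1-9][0-9]*"' "$report"; then
  echo "Failures recorded in $report" >&2
  exit 1
fi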
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=upgrade
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/upgrade.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-upgrade
make[2]: Entering directory '/tmp/jaeger-tests'
make docker JAEGER_VERSION=1.51.1 IMG="quay.io//jaeger-operator:next"
make[3]: Entering directory '/tmp/jaeger-tests'
[ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.51.0" --build-arg=JAEGER_VERSION=1.51.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2023-12-11T09:50:49Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" .
make[3]: Leaving directory '/tmp/jaeger-tests'
touch build-e2e-upgrade-image
SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.51.0" JAEGER_OPERATOR_VERSION="1.51.0" JAEGER_VERSION="1.51.0" ./tests/e2e/upgrade/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-12-09-012410 True False 3h5m Cluster version is 4.15.0-0.nightly-2023-12-09-012410'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-12-09-012410 True False 3h5m Cluster version is 4.15.0-0.nightly-2023-12-09-012410' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
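The version_le trace above compares the Kafka version against 0.25.0 with a version sort; the helper's body is visible in the xtrace output, so it can be reconstructed almost verbatim. A <= B exactly when A sorts first under sort -V:

# Reconstructed from the xtrace lines above (the actual definition lives in
# the repo's shell library, not in this log, and may differ in detail).
version_le() {
  test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
}

# In the trace, version_le 3.6.0 0.25.0 runs 'test 0.25.0 == 3.6.0', which
# fails: Kafka 3.6.0 is newer than 0.25.0, so KAFKA_USE_CUSTOM_PODSET=true.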
+++ dirname ./tests/e2e/upgrade/render.sh
++ export SUITE_DIR=./tests/e2e/upgrade
++ SUITE_DIR=./tests/e2e/upgrade
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/upgrade
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ export JAEGER_NAME
+ '[' true = true ']'
+ skip_test upgrade 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade
+ warning 'upgrade: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m'
WAR: upgrade: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test upgrade-from-latest-release 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade-from-latest-release
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade-from-latest-release
+ warning 'upgrade-from-latest-release: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m'
WAR: upgrade-from-latest-release: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running upgrade E2E tests'
Running upgrade E2E tests
+ cd tests/e2e/upgrade/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-282053367
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-2b8p0w0t-5054a.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 600 seconds for each step
    harness.go:372: testsuite: . has 1 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT  kuttl/harness/artifacts
logger.go:42: 09:50:51 | artifacts | Creating namespace: kuttl-test-crisp-bengal
logger.go:42: 09:50:51 | artifacts | artifacts events from ns kuttl-test-crisp-bengal:
logger.go:42: 09:50:51 | artifacts | Deleting namespace: kuttl-test-crisp-bengal
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (5.96s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.80s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2023-12-11T09:50:57Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-11T09:50:57Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-11T09:50:57Z" level=debug msg="normalizing test case names"
time="2023-12-11T09:50:57Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
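Both upgrade tests were skipped on OpenShift by deleting their rendered directories before kuttl ran, which is why the suite ended up containing only the synthetic artifacts test. The xtrace output makes the shape of the skip_test and warning helpers clear; the definitions below are a reconstruction under that assumption, not the repo's actual code:

# Reconstructed from the xtrace above (assumption: the real helpers live in
# the repo's shell utilities and may differ in detail).
warning() {
  [ $# -ne 1 ] && return 1
  echo -e "\e[1;33mWAR: $1\e[0m"
}

skip_test() {
  [ $# -ne 2 ] && return 1
  local test_name="$1" message="$2"
  # Refuse to delete anything unless running inside the rendered _build
  # directory, mirroring the basename check in the trace.
  if [ "$(basename "$(pwd)")" != "_build" ]; then
    return 1
  fi
  rm -rf "$test_name"
  warning "$test_name: $message"
}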