Installing kuttl
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64
KUBECONFIG file is: /tmp/kubeconfig-1891468343
for suite in elasticsearch examples generate miscellaneous sidecar streaming ui upgrade; do \
make run-e2e-tests-$suite ; \
done
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=elasticsearch
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/elasticsearch.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-elasticsearch
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
SKIP_ES_EXTERNAL=true \
KAFKA_VERSION=0.32.0 \
SKIP_KAFKA=false \
./tests/e2e/elasticsearch/render.sh
+++ kubectl get clusterversion
++ output='NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.15.0-0.nightly-2023-11-04-120954   True        False         7m21s   Cluster version is 4.15.0-0.nightly-2023-11-04-120954'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.15.0-0.nightly-2023-11-04-120954   True        False         7m21s   Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 0.32.0 ']'
++ version_le 0.32.0 0.25.0
+++ echo 0.32.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 0.32.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/elasticsearch/render.sh
++ export SUITE_DIR=./tests/e2e/elasticsearch
++ SUITE_DIR=./tests/e2e/elasticsearch
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
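Note: the version_le call traced above (and the version_ge calls further down) reduce to a `sort -V` comparison. The helper bodies are not printed in this log; a minimal reconstruction from the traced pipeline (echo | tr | sort | head, then test):

# version_le A B: true when A <= B, i.e. A sorts first in version order.
version_le() {
  test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
}

# version_ge A B: true when A >= B, i.e. A sorts first in reverse version order.
version_ge() {
  test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" == "$1"
}

Here version_le 0.32.0 0.25.0 fails (0.25.0 sorts first), so the script falls through to KAFKA_USE_CUSTOM_PODSET=true for Kafka 0.32.0.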
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ start_test es-from-aio-to-production
+ '[' 1 -ne 1 ']'
+ test_name=es-from-aio-to-production
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-from-aio-to-production'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m'
Rendering files for test es-from-aio-to-production
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-from-aio-to-production
+ cd es-from-aio-to-production
+ jaeger_name=my-jaeger
+ render_install_jaeger my-jaeger allInOne 00
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=allInOne
+ test_step=00
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test my-jaeger true 01
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 03
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=03
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml
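Note: every render_* helper in this trace follows the same pattern: export a handful of environment variables, run gomplate over a template, then unset them. The actual templates under tests/templates are not reproduced in this log; a minimal hypothetical stand-in to illustrate the mechanism (the template name and its contents are invented for the example):

# jaeger-name.yaml.template -- hypothetical; real templates live under tests/templates/
# apiVersion: jaegertracing.io/v1
# kind: Jaeger
# metadata:
#   name: {{ env.Getenv "JAEGER_NAME" }}
export JAEGER_NAME=my-jaeger
/tmp/jaeger-tests/bin/gomplate -f jaeger-name.yaml.template -o ./00-install.yaml
unset JAEGER_NAME   # the helpers unset what they exported, as seen in the trace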
+ [[ true = true ]]
+ [[ true = true ]]
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml
+ render_smoke_test my-jaeger true 04
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=04
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test es-increasing-replicas
+ '[' 1 -ne 1 ']'
+ test_name=es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-increasing-replicas'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m'
Rendering files for test es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production
+ '[' es-from-aio-to-production '!=' _build ']'
+ cd ..
+ mkdir -p es-increasing-replicas
+ cd es-increasing-replicas
+ jaeger_name=simple-prod
+ '[' true = true ']'
+ jaeger_deployment_mode=production_autoprovisioned
+ render_install_jaeger simple-prod production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ cp ./01-install.yaml ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml
+ cp ./01-assert.yaml ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml
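Note: step 02 of es-increasing-replicas is not rendered from a template; it is derived from step 01 by in-place yq edits, so kuttl applies the scaled-up Jaeger CR (02-install.yaml) and then blocks until the cluster state matches the expectations (02-assert.yaml). The same commands as above, annotated:

cp ./01-install.yaml ./02-install.yaml
yq e -i '.spec.collector.replicas=2' ./02-install.yaml   # scale the collector
yq e -i '.spec.query.replicas=2' ./02-install.yaml       # scale the query service
cp ./01-assert.yaml ./02-assert.yaml
yq e -i '.spec.replicas=2' ./02-assert.yaml              # expected spec ...
yq e -i '.status.readyReplicas=2' ./02-assert.yaml       # ... and observed status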
+ render_smoke_test simple-prod true 03
+ '[' 3 -ne 3 ']'
+ jaeger=simple-prod
+ is_secured=true
+ test_step=03
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ cp ./02-install.yaml ./04-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml
+ '[' true = true ']'
+ skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas
+ '[' es-increasing-replicas '!=' _build ']'
+ cd ..
+ rm -rf es-index-cleaner-upstream
+ warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m'
WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true
+ '[' true = true ']'
+ es_index_cleaner -autoprov production_autoprovisioned
+ '[' 2 -ne 2 ']'
+ postfix=-autoprov
+ jaeger_deployment_strategy=production_autoprovisioned
+ start_test es-index-cleaner-autoprov
+ '[' 1 -ne 1 ']'
+ test_name=es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-index-cleaner-autoprov'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m'
Rendering files for test es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-index-cleaner-autoprov
+ cd es-index-cleaner-autoprov
+ jaeger_name=test-es-index-cleaner-with-prefix
+ cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner
+ secured_es_connection=false
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_managed_es ']'
+ ELASTICSEARCH_URL=https://elasticsearch
+ secured_es_connection=true
+ cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md .
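Note: the repeated '[' production_autoprovisioned = allInOne ']' ... comparisons throughout this trace are render_install_jaeger probing its deploy_mode argument against each supported mode before choosing a template pair. A sketch of that dispatch, reconstructed from the comparisons and the rendered paths in the trace (the real function may differ in detail):

render_install_jaeger() {
  export JAEGER_NAME=$1
  deploy_mode=$2
  test_step=$3
  templates=/tmp/jaeger-tests/tests/templates
  if [ "$deploy_mode" = allInOne ]; then
    install=$templates/allinone-jaeger-install.yaml.template
    assert=$templates/allinone-jaeger-assert.yaml.template
  elif [ "$deploy_mode" = production_autoprovisioned ]; then
    install=$templates/openshift/production-jaeger-autoprovisioned-install.yaml.template
    assert=$templates/production-jaeger-assert.yaml.template
  fi  # production and production_cassandra branches omitted in this sketch
  /tmp/jaeger-tests/bin/gomplate -f "$install" -o "./$test_step-install.yaml"
  /tmp/jaeger-tests/bin/gomplate -f "$assert" -o "./$test_step-assert.yaml"
}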
+ render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=test-es-index-cleaner-with-prefix
+ JAEGER_NAME=test-es-index-cleaner-with-prefix
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml
+ render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02
+ '[' 6 -ne 6 ']'
+ jaeger=test-es-index-cleaner-with-prefix
+ is_secured=true
+ number_of_spans=5
+ job_number=00
+ ensure_reported_spans=true
+ test_step=02
+ export JAEGER_NAME=test-es-index-cleaner-with-prefix
+ JAEGER_NAME=test-es-index-cleaner-with-prefix
+ export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export DAYS=5
+ DAYS=5
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query
+ JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ sed 's~enabled: false~enabled: true~gi' ./01-install.yaml
+ CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ job_number=00
+ test_step=06
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=test-es-index-cleaner-with-prefix-curator
+ JOB_NUMBER=00
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml
+ JOB_NUMBER=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml
+ '[' true = true ']'
+ get_elasticsearch_openshift_operator_version
+ export ESO_OPERATOR_VERSION
+ '[' true = true ']'
++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
+ properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}'
+ '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}' ']'
++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}'
++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
+ ESO_OPERATOR_VERSION=5.7.7
++ version_ge 5.7.7 5.4
+++ echo 5.7.7 5.4
+++ tr ' ' '\n'
+++ sort -rV
+++ head -n 1
++ test 5.7.7 == 5.7.7
+ '[' -n '' ']'
+ skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov
+ '[' es-index-cleaner-autoprov '!=' _build ']'
+ cd ..
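Note: escape_command exists because CMD_PARAMETERS passes through another round of template/shell expansion before it reaches the check-indices Job, so regex patterns like jaeger-span-\d{4}-\d{2}-\d{2} need their backslashes doubled to survive. Reconstructed from the traced echo | sed pipeline:

escape_command() {
  command=$1
  # Double every backslash: \d becomes \\d so it survives re-expansion.
  export CMD_PARAMETERS=$(echo "$command" | sed 's/\\/\\\\/g')
}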
+ rm -rf es-index-cleaner-managed
+ warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ start_test es-multiinstance
+ '[' 1 -ne 1 ']'
+ test_name=es-multiinstance
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-multiinstance'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m'
Rendering files for test es-multiinstance
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-multiinstance
+ cd es-multiinstance
+ jaeger_name=instance-1
+ render_install_jaeger instance-1 production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=instance-1
+ JAEGER_NAME=instance-1
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml
+ '[' true = true ']'
+ skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance
+ '[' es-multiinstance '!=' _build ']'
+ cd ..
+ rm -rf es-rollover-upstream
+ warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m'
WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true
+ '[' true = true ']'
+ es_rollover -autoprov production_autoprovisioned
+ '[' 2 -ne 2 ']'
+ postfix=-autoprov
+ jaeger_deployment_strategy=production_autoprovisioned
+ start_test es-rollover-autoprov
+ '[' 1 -ne 1 ']'
+ test_name=es-rollover-autoprov
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-rollover-autoprov'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m'
Rendering files for test es-rollover-autoprov
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-rollover-autoprov
+ cd es-rollover-autoprov
+ cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md .
+ jaeger_name=my-jaeger
+ secured_es_connection=false
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_managed_es ']'
+ ELASTICSEARCH_URL=https://elasticsearch
+ secured_es_connection=true
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_report_spans my-jaeger true 2 00 true 02
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=00
+ ensure_reported_spans=true
+ test_step=02
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ job_number=00
+ test_step=03
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=00
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml
+ JOB_NUMBER=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ job_number=01
+ test_step=04
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=01
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml
+ JOB_NUMBER=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml
+ render_report_spans my-jaeger true 2 02 true 06
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=02
+ ensure_reported_spans=true
+ test_step=06
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=02
+ JOB_NUMBER=02
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ job_number=02
+ test_step=07
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=02
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml
+ JOB_NUMBER=02
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ job_number=03
+ test_step=08
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=03
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml
+ JOB_NUMBER=03
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ job_number=04
+ test_step=09
+ escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=04
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml
+ JOB_NUMBER=04
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml
+ render_report_spans my-jaeger true 2 03 true 10
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=03
+ ensure_reported_spans=true
+ test_step=10
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=03
+ JOB_NUMBER=03
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ CRONJOB_NAME=my-jaeger-es-rollover
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'','
+ job_number=05
+ test_step=11
+ escape_command ''\''--name'\'', '\''jaeger-span-000002'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-000002'\'','
++ echo ''\''--name'\'', '\''jaeger-span-000002'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=05
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml
+ JOB_NUMBER=05
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ job_number=06
+ test_step=12
+ escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=06
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml
+ JOB_NUMBER=06
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml
+ '[' true = true ']'
+ get_elasticsearch_openshift_operator_version
+ export ESO_OPERATOR_VERSION
+ '[' true = true ']'
++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
+ properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}'
+ '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}' ']'
++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.7.7"}}]}'
++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
+ ESO_OPERATOR_VERSION=5.7.7
++ version_ge 5.7.7 5.4
+++ echo 5.7.7 5.4
+++ tr ' ' '\n'
+++ sort -rV
+++ head -n 1
++ test 5.7.7 == 5.7.7
+ '[' -n '' ']'
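Note: get_elasticsearch_openshift_operator_version reads the OLM properties annotation stamped on the running elasticsearch-operator pod and filters it with yq. A standalone one-liner equivalent to the trace above:

ESO_OPERATOR_VERSION=$(kubectl get pods -l name=elasticsearch-operator --all-namespaces \
    -o=jsonpath='{.items[0].metadata.annotations.operatorframework\.io/properties}' \
  | /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version')
# Here it yields 5.7.7, and version_ge 5.7.7 5.4 passes; the skip that follows
# is driven by the separate '[' -n '' ']' (empty-variable) check, not by the
# version gate.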
+ skip_test es-rollover-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov
+ '[' es-rollover-autoprov '!=' _build ']'
+ cd ..
+ rm -rf es-rollover-managed
+ warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ skip_test es-spark-dependencies 'This test is not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=es-spark-dependencies
+ message='This test is not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ rm -rf es-spark-dependencies
+ warning 'es-spark-dependencies: This test is not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m'
WAR: es-spark-dependencies: This test is not supported in OpenShift
+ [[ true = true ]]
+ [[ false = false ]]
+ start_test es-streaming-autoprovisioned
+ '[' 1 -ne 1 ']'
+ test_name=es-streaming-autoprovisioned
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-streaming-autoprovisioned'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-streaming-autoprovisioned\e[0m'
Rendering files for test es-streaming-autoprovisioned
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-streaming-autoprovisioned
+ cd es-streaming-autoprovisioned
+ jaeger_name=auto-provisioned
+ render_assert_kafka true auto-provisioned 00
+ '[' 3 -ne 3 ']'
+ autoprovisioned=true
+ cluster_name=auto-provisioned
+ test_step=00
+ '[' true = true ']'
+ is_kafka_minimal_enabled
+ namespaces=(observability openshift-operators openshift-distributed-tracing)
+ for i in "${namespaces[@]}"
++ kubectl get pods -n observability -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=true
+ '[' true == true ']'
+ return 0
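Note: is_kafka_minimal_enabled scans a fixed list of namespaces for the jaeger-operator pod and reads its KAFKA-PROVISIONING-MINIMAL container env var; the first namespace that answers true short-circuits the search (here: openshift-distributed-tracing). Reconstructed from the trace:

is_kafka_minimal_enabled() {
  namespaces=(observability openshift-operators openshift-distributed-tracing)
  for i in "${namespaces[@]}"; do
    enabled=$(kubectl get pods -n "$i" -l name=jaeger-operator -o yaml \
      | /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value')
    if [ "$enabled" == true ]; then
      return 0
    fi
  done
  return 1
}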
+ replicas=1
+ CLUSTER_NAME=auto-provisioned
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml
++ expr 00 + 1
+ CLUSTER_NAME=auto-provisioned
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml
++ expr 00 + 2
+ CLUSTER_NAME=auto-provisioned
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml
+ render_smoke_test auto-provisioned true 04
+ '[' 3 -ne 3 ']'
+ jaeger=auto-provisioned
+ is_secured=true
+ test_step=04
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443
+ JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268
+ export JAEGER_NAME=auto-provisioned
+ JAEGER_NAME=auto-provisioned
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running elasticsearch E2E tests'
Running elasticsearch E2E tests
+ cd tests/e2e/elasticsearch/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-1891468343
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 420 seconds for each step
    harness.go:372: testsuite: . has 8 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN   kuttl/harness/es-from-aio-to-production
=== PAUSE kuttl/harness/es-from-aio-to-production
=== RUN   kuttl/harness/es-increasing-replicas
=== PAUSE kuttl/harness/es-increasing-replicas
=== RUN   kuttl/harness/es-index-cleaner-autoprov
=== PAUSE kuttl/harness/es-index-cleaner-autoprov
=== RUN   kuttl/harness/es-multiinstance
=== PAUSE kuttl/harness/es-multiinstance
=== RUN   kuttl/harness/es-rollover-autoprov
=== PAUSE kuttl/harness/es-rollover-autoprov
=== RUN   kuttl/harness/es-simple-prod
=== PAUSE kuttl/harness/es-simple-prod
=== RUN   kuttl/harness/es-streaming-autoprovisioned
=== PAUSE kuttl/harness/es-streaming-autoprovisioned
=== CONT  kuttl/harness/artifacts
logger.go:42: 08:22:10 | artifacts | Creating namespace: kuttl-test-social-dragon
logger.go:42: 08:22:10 | artifacts | artifacts events from ns kuttl-test-social-dragon:
logger.go:42: 08:22:10 | artifacts | Deleting namespace: kuttl-test-social-dragon
=== CONT  kuttl/harness/es-multiinstance
logger.go:42: 08:22:16 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:22:16 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:22:16 | es-multiinstance | Creating namespace: kuttl-test-moral-egret
logger.go:42: 08:22:16 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace
logger.go:42: 08:22:16 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true]
logger.go:42: 08:22:16 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace
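Note: the "Ignoring ..." lines above are kuttl's step discovery at work: only files matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ become numbered test steps, which is why the render script names everything NN-*.yaml and why templates and READMEs ride along unexecuted. Illustrative filenames from this suite:

# 00-install.yaml                          -> step 0, applied by kuttl
# 00-assert.yaml                           -> step 0, waited on until the cluster matches
# 03-create-second-instance.yaml.template  -> ignored (".yaml.template" fails the regexp)
# README.md                                -> ignored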
logger.go:42: 08:22:16 | es-multiinstance/1-install | starting test step 1-install logger.go:42: 08:22:16 | es-multiinstance/1-install | Jaeger:kuttl-test-moral-egret/instance-1 created logger.go:42: 08:23:04 | es-multiinstance/1-install | test step completed 1-install logger.go:42: 08:23:04 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace logger.go:42: 08:23:04 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test] logger.go:42: 08:23:04 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created logger.go:42: 08:23:04 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace logger.go:42: 08:23:04 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance logger.go:42: 08:23:04 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test] logger.go:42: 08:23:07 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created logger.go:42: 08:23:07 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000] logger.go:42: 08:23:53 | es-multiinstance/3-create-second-instance | assert is valid logger.go:42: 08:23:53 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance logger.go:42: 08:23:53 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets logger.go:42: 08:23:53 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1] logger.go:42: 08:23:53 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2] logger.go:42: 08:23:53 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0] logger.go:42: 08:23:53 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets logger.go:42: 08:23:53 | es-multiinstance/5-delete | starting test step 5-delete logger.go:42: 08:23:53 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false] logger.go:42: 08:23:53 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted logger.go:42: 08:23:53 | es-multiinstance/5-delete | test step completed 5-delete logger.go:42: 08:23:53 | es-multiinstance | es-multiinstance events from ns kuttl-test-moral-egret: logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:22 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d47595884 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2 replicaset-controller logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2 Binding Scheduled Successfully assigned kuttl-test-moral-egret/elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2 to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:22 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmoralegretinstance1-1 ScalingReplicaSet Scaled up replica set 
elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d47595884 to 1 deployment-controller
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2 AddedInterface Add eth0 [10.129.2.19/23] from ovn-kubernetes
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2.spec.containers{elasticsearch} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" in 6.506s (6.506s including waiting) kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" in 2.417s (2.417s including waiting) kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:43 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmoralegretinstance1-1-6d4759588wxgz2.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:54 +0000 UTC Normal Pod instance-1-collector-899cdd544-w6j28 Binding Scheduled Successfully assigned kuttl-test-moral-egret/instance-1-collector-899cdd544-w6j28 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:54 +0000 UTC Normal Pod instance-1-collector-899cdd544-w6j28 AddedInterface Add eth0 [10.128.2.25/23] from ovn-kubernetes
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:54 +0000 UTC Normal ReplicaSet.apps instance-1-collector-899cdd544 SuccessfulCreate Created pod: instance-1-collector-899cdd544-w6j28 replicaset-controller
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:54 +0000 UTC Normal Deployment.apps instance-1-collector ScalingReplicaSet Scaled up replica set instance-1-collector-899cdd544 to 1 deployment-controller
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:54 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm Binding Scheduled Successfully assigned kuttl-test-moral-egret/instance-1-query-6b967ff66-ggdbm to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:54 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm AddedInterface Add eth0 [10.131.0.20/23] from ovn-kubernetes
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:54 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:54 +0000 UTC Normal ReplicaSet.apps instance-1-query-6b967ff66 SuccessfulCreate Created pod: instance-1-query-6b967ff66-ggdbm replicaset-controller
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:54 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-6b967ff66 to 1 deployment-controller
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:55 +0000 UTC Normal Pod instance-1-collector-899cdd544-w6j28.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:57 +0000 UTC Normal Pod instance-1-collector-899cdd544-w6j28.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" in 2.434s (2.434s including waiting) kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:57 +0000 UTC Normal Pod instance-1-collector-899cdd544-w6j28.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:57 +0000 UTC Normal Pod instance-1-collector-899cdd544-w6j28.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:59 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" in 4.289s (4.289s including waiting) kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:59 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:59 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:59 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:59 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:59 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:22:59 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:23:01 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" in 2.103s (2.103s including waiting) kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:23:01 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:23:01 +0000 UTC Normal Pod instance-1-query-6b967ff66-ggdbm.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:23:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:23:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:23:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:23:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:23:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod instance-1-collector-899cdd544-w6j28 horizontal-pod-autoscaler
logger.go:42: 08:23:53 | es-multiinstance | 2023-11-06 08:23:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:23:53 | es-multiinstance | Deleting namespace: kuttl-test-moral-egret
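The HorizontalPodAutoscaler warnings that close the es-multiinstance run (FailedGetResourceMetric, FailedComputeMetricsReplicas) are startup noise: metrics-server has no samples yet for the brand-new collector pod, and the 08:23:41 event additionally shows the jaeger-collector container declares no memory request, so memory utilization can never be computed. A hedged sketch of a fix for the latter, assuming the Jaeger CR exposes the usual spec.collector.resources block; the values are illustrative, not taken from this run:

    # Assumption: jaeger-operator common spec; adjust requests to taste.
    kubectl patch jaeger instance-1 -n kuttl-test-moral-egret --type=merge \
      -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'

With a request in place, the HPA can compute utilization as soon as metrics-server reports the pod.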
=== CONT kuttl/harness/es-streaming-autoprovisioned
logger.go:42: 08:24:00 | es-streaming-autoprovisioned | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:24:00 | es-streaming-autoprovisioned | Creating namespace: kuttl-test-cosmic-koi
logger.go:42: 08:24:00 | es-streaming-autoprovisioned/0-install | starting test step 0-install
logger.go:42: 08:24:00 | es-streaming-autoprovisioned/0-install | Jaeger:kuttl-test-cosmic-koi/auto-provisioned created
logger.go:42: 08:25:17 | es-streaming-autoprovisioned/0-install | test step completed 0-install
logger.go:42: 08:25:17 | es-streaming-autoprovisioned/1- | starting test step 1-
logger.go:42: 08:26:00 | es-streaming-autoprovisioned/1- | test step completed 1-
logger.go:42: 08:26:00 | es-streaming-autoprovisioned/2- | starting test step 2-
logger.go:42: 08:26:34 | es-streaming-autoprovisioned/2- | test step completed 2-
logger.go:42: 08:26:34 | es-streaming-autoprovisioned/3- | starting test step 3-
logger.go:42: 08:26:43 | es-streaming-autoprovisioned/3- | test step completed 3-
logger.go:42: 08:26:43 | es-streaming-autoprovisioned/4-smoke-test | starting test step 4-smoke-test
logger.go:42: 08:26:43 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provisioned /dev/null]
logger.go:42: 08:26:46 | es-streaming-autoprovisioned/4-smoke-test | Warning: resource jaegers/auto-provisioned is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:26:52 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:26:53 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:26:55 | es-streaming-autoprovisioned/4-smoke-test | job.batch/report-span created
logger.go:42: 08:26:55 | es-streaming-autoprovisioned/4-smoke-test | job.batch/check-span created
logger.go:42: 08:27:09 | es-streaming-autoprovisioned/4-smoke-test | test step completed 4-smoke-test
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | es-streaming-autoprovisioned events from ns kuttl-test-cosmic-koi:
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:07 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4 Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:07 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc4d8c87b SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4 replicaset-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:07 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc4d8c87b to 1 deployment-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4 AddedInterface Add eth0 [10.128.2.27/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:18 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:23 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcosmickoiautoprovisioned-1-6bc48nsr4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:36 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:37 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:37 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
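The WaitForFirstConsumer and ExternalProvisioning events above are the expected flow for an EBS-backed claim: the volume is provisioned only once the zookeeper pod is scheduled, and the claim binds on its own (the ProvisioningSucceeded event follows below). If a script needed to block on that, one option (assuming kubectl 1.23+ for the jsonpath form) is:

    kubectl -n kuttl-test-cosmic-koi wait pvc/data-auto-provisioned-zookeeper-0 \
      --for=jsonpath='{.status.phase}'=Bound --timeout=5m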
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:37 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-cosmic-koi/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:40 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-659f04df-ad9f-445a-869d-97bbf3633399 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:41 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/auto-provisioned-zookeeper-0 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:43 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-659f04df-ad9f-445a-869d-97bbf3633399" attachdetach-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:47 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.129.2.20/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:47 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" in 8.732s (8.732s including waiting) kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:24:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:18 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:19 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:19 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
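Similarly, the earlier "Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000]" warnings only mean the probe could not connect while Elasticsearch was still bootstrapping; curl reports 000 when it receives no HTTP response at all. A rough reconstruction of that kind of check from inside the pod (scheme and port are assumptions, based on the standard ES endpoint):

    # Prints 000 until the node answers, then the real status code.
    curl -sk -o /dev/null -w '%{http_code}\n' https://localhost:9200/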
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:19 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-cosmic-koi/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:22 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-c272a027-40ec-4e88-86bf-9b87d291304a ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:23 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/auto-provisioned-kafka-0 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:25 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-c272a027-40ec-4e88-86bf-9b87d291304a" attachdetach-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:30 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.131.0.23/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:30 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:37 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" in 7.127s (7.127s including waiting) kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:37 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:25:37 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:02 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8 Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/auto-provisioned-entity-operator-7dc75b5557-cgjg8 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:02 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-7dc75b5557 SuccessfulCreate Created pod: auto-provisioned-entity-operator-7dc75b5557-cgjg8 replicaset-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:02 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-7dc75b5557 to 1 deployment-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:03 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8 AddedInterface Add eth0 [10.129.2.21/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:03 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{topic-operator} Pulling Pulling image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{topic-operator} Pulled Successfully pulled image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" in 7.7s (7.7s including waiting) kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7dc75b5557-cgjg8.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-collector-67499f5794-6rnbl Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/auto-provisioned-collector-67499f5794-6rnbl to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Warning Pod auto-provisioned-collector-67499f5794-6rnbl FailedMount MountVolume.SetUp failed for volume "auto-provisioned-collector-tls-config-volume" : secret "auto-provisioned-collector-headless-tls" not found kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-67499f5794 SuccessfulCreate Created pod: auto-provisioned-collector-67499f5794-6rnbl replicaset-controller
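The FailedMount warning for secret "auto-provisioned-collector-headless-tls" just above is a startup race rather than a failure: the operator creates that TLS secret moments after the collector Deployment, and kubelet retries the mount until it succeeds. A small sketch of how one could wait for the secret to appear (the loop is an assumption; the harness itself relies on kubelet's retry):

    until kubectl -n kuttl-test-cosmic-koi get secret auto-provisioned-collector-headless-tls >/dev/null 2>&1; do
      sleep 2
    done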
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-67499f5794 to 1 deployment-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-ingester-5d987c5fdd-rrl5v Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/auto-provisioned-ingester-5d987c5fdd-rrl5v to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-ingester-5d987c5fdd-rrl5v AddedInterface Add eth0 [10.129.2.22/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-ingester-5d987c5fdd-rrl5v.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-5d987c5fdd SuccessfulCreate Created pod: auto-provisioned-ingester-5d987c5fdd-rrl5v replicaset-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-5d987c5fdd to 1 deployment-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/auto-provisioned-query-557487cf84-mg5fm to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm AddedInterface Add eth0 [10.131.0.25/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-557487cf84 SuccessfulCreate Created pod: auto-provisioned-query-557487cf84-mg5fm replicaset-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:36 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-557487cf84 to 1 deployment-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:37 +0000 UTC Normal Pod auto-provisioned-collector-67499f5794-6rnbl AddedInterface Add eth0 [10.131.0.24/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:37 +0000 UTC Normal Pod auto-provisioned-collector-67499f5794-6rnbl.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:37 +0000 UTC Normal Pod auto-provisioned-collector-67499f5794-6rnbl.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:37 +0000 UTC Normal Pod auto-provisioned-collector-67499f5794-6rnbl.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:42 +0000 UTC Normal Pod auto-provisioned-ingester-5d987c5fdd-rrl5v.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" in 5.495s (5.495s including waiting) kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:42 +0000 UTC Normal Pod auto-provisioned-ingester-5d987c5fdd-rrl5v.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:42 +0000 UTC Normal Pod auto-provisioned-ingester-5d987c5fdd-rrl5v.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:47 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:47 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:47 +0000 UTC Normal Pod auto-provisioned-query-557487cf84-mg5fm.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:47 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-557487cf84 SuccessfulDelete Deleted pod: auto-provisioned-query-557487cf84-mg5fm replicaset-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:47 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled down replica set auto-provisioned-query-557487cf84 to 0 from 1 deployment-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/auto-provisioned-query-76f7d76f74-f65vt to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt AddedInterface Add eth0 [10.131.0.26/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Pod auto-provisioned-query-76f7d76f74-f65vt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-76f7d76f74 SuccessfulCreate Created pod: auto-provisioned-query-76f7d76f74-f65vt replicaset-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:49 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-76f7d76f74 to 1 deployment-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:55 +0000 UTC Normal Pod check-span-lrsn8 Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/check-span-lrsn8 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:55 +0000 UTC Normal Pod check-span-lrsn8 AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:55 +0000 UTC Normal Pod check-span-lrsn8.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:55 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-lrsn8 job-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:55 +0000 UTC Normal Pod report-span-dq5kl Binding Scheduled Successfully assigned kuttl-test-cosmic-koi/report-span-dq5kl to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:55 +0000 UTC Normal Pod report-span-dq5kl AddedInterface Add eth0 [10.129.2.23/23] from ovn-kubernetes
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:55 +0000 UTC Normal Pod report-span-dq5kl.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:55 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-dq5kl job-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:57 +0000 UTC Normal Pod check-span-lrsn8.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" in 1.784s (1.784s including waiting) kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:57 +0000 UTC Normal Pod check-span-lrsn8.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:57 +0000 UTC Normal Pod check-span-lrsn8.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:57 +0000 UTC Normal Pod report-span-dq5kl.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" in 1.818s (1.818s including waiting) kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:57 +0000 UTC Normal Pod report-span-dq5kl.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:26:57 +0000 UTC Normal Pod report-span-dq5kl.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | 2023-11-06 08:27:08 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:27:09 | es-streaming-autoprovisioned | Deleting namespace: kuttl-test-cosmic-koi
=== CONT kuttl/harness/es-simple-prod
logger.go:42: 08:27:49 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:27:49 | es-simple-prod | Creating namespace: kuttl-test-wanted-bull
logger.go:42: 08:27:49 | es-simple-prod | es-simple-prod events from ns kuttl-test-wanted-bull:
logger.go:42: 08:27:49 | es-simple-prod | Deleting namespace: kuttl-test-wanted-bull
=== CONT kuttl/harness/es-rollover-autoprov
logger.go:42: 08:27:55 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:27:55 | es-rollover-autoprov | Creating namespace: kuttl-test-amused-manatee
logger.go:42: 08:27:55 | es-rollover-autoprov/1-install | starting test step 1-install
logger.go:42: 08:27:55 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-amused-manatee/my-jaeger created
logger.go:42: 08:28:31 | es-rollover-autoprov/1-install | test step completed 1-install
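The report-spans steps below repeat the render-and-apply pattern used by the smoke test above: gomplate renders a Job manifest from a template using the logged environment values, kubectl applies it, and the step asserts on the Job's completion. A condensed sketch (ASSERT_IMG as logged is omitted for brevity; the final wait is an assumption standing in for kuttl's own assert files):

    DAYS=2 JOB_NUMBER=00 MOUNT_SECRET=e2e-test \
    JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://my-jaeger-query \
    /tmp/jaeger-tests/bin/gomplate \
      -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template \
      -o report-span-00-job.yaml
    kubectl apply -f report-span-00-job.yaml -n "$NAMESPACE"
    kubectl wait --for=condition=complete job/00-report-span -n "$NAMESPACE" --timeout=5m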
logger.go:42: 08:28:31 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 08:28:31 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:28:33 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:28:39 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 08:28:39 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 08:28:40 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 08:29:06 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 08:29:06 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices
logger.go:42: 08:29:06 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-amused-manatee/00-check-indices created
logger.go:42: 08:29:10 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices
logger.go:42: 08:29:10 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices
logger.go:42: 08:29:10 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-amused-manatee/01-check-indices created
logger.go:42: 08:29:14 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices
logger.go:42: 08:29:14 | es-rollover-autoprov/5-install | starting test step 5-install
logger.go:42: 08:29:14 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-amused-manatee/my-jaeger updated
logger.go:42: 08:29:24 | es-rollover-autoprov/5-install | test step completed 5-install
logger.go:42: 08:29:24 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans
logger.go:42: 08:29:24 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:29:31 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml]
logger.go:42: 08:29:32 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE]
logger.go:42: 08:29:32 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created
logger.go:42: 08:29:55 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans
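The recurring "missing the kubectl.kubernetes.io/last-applied-configuration annotation" warning shows up whenever the harness kubectl-applies a patch to a CR the operator tooling created imperatively; as the message itself says, kubectl patches the annotation in automatically, so it is benign. For completeness, the annotation can also be seeded explicitly before applying (an aside, not something these tests do):

    kubectl apply set-last-applied -f my-jaeger.yaml -n "$NAMESPACE" --create-annotation=true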
logger.go:42: 08:29:55 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices
logger.go:42: 08:29:55 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-amused-manatee/02-check-indices created
logger.go:42: 08:29:59 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices
logger.go:42: 08:29:59 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices
logger.go:42: 08:29:59 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-amused-manatee/03-check-indices created
logger.go:42: 08:30:03 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices
logger.go:42: 08:30:03 | es-rollover-autoprov/9-check-indices | starting test step 9-check-indices
logger.go:42: 08:30:03 | es-rollover-autoprov/9-check-indices | Job:kuttl-test-amused-manatee/04-check-indices created
logger.go:42: 08:30:07 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices
logger.go:42: 08:30:07 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans
logger.go:42: 08:30:07 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:30:15 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml]
logger.go:42: 08:30:15 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE]
logger.go:42: 08:30:16 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created
logger.go:42: 08:30:40 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans
logger.go:42: 08:30:40 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices
logger.go:42: 08:30:40 | es-rollover-autoprov/11-check-indices | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE]
logger.go:42: 08:30:49 | es-rollover-autoprov/11-check-indices | time="2023-11-06T08:30:49Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists"
logger.go:42: 08:30:49 | es-rollover-autoprov/11-check-indices | time="2023-11-06T08:30:49Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 08:30:49 | es-rollover-autoprov/11-check-indices | time="2023-11-06T08:30:49Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully"
logger.go:42: 08:30:49 | es-rollover-autoprov/11-check-indices | time="2023-11-06T08:30:49Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob"
logger.go:42: 08:30:49 | es-rollover-autoprov/11-check-indices | time="2023-11-06T08:30:49Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 08:30:49 | es-rollover-autoprov/11-check-indices | time="2023-11-06T08:30:49Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 08:30:59 | es-rollover-autoprov/11-check-indices | time="2023-11-06T08:30:59Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
logger.go:42: 08:31:09 | es-rollover-autoprov/11-check-indices | time="2023-11-06T08:31:09Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after my-jaeger-es-rollover 20.027450714s"
logger.go:42: 08:31:10 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-amused-manatee/05-check-indices created
logger.go:42: 08:31:14 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices
logger.go:42: 08:31:14 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices
logger.go:42: 08:31:14 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-amused-manatee/06-check-indices created
logger.go:42: 08:31:18 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices
logger.go:42: 08:31:18 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-amused-manatee:
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:01 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d64656544 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22 replicaset-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22 Binding Scheduled Successfully assigned kuttl-test-amused-manatee/elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:01 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22 FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:01 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d64656544 to 1 deployment-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22 AddedInterface Add eth0 [10.129.2.25/23] from ovn-kubernetes
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
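An aside on the wait-cronjob helper used in step 11-check-indices above: it polls until the next Job spawned by the my-jaeger-es-rollover CronJob succeeds, which here took about 20 seconds. A rough kubectl-only equivalent, offered as an assumption about the helper's behavior rather than a transcript of it:

    # Wait for a Job owned by the rollover CronJob to complete.
    job=""
    while [ -z "$job" ]; do
      job=$(kubectl -n "$NAMESPACE" get jobs -o name | grep my-jaeger-es-rollover | tail -n 1)
      [ -z "$job" ] && sleep 5
    done
    kubectl -n "$NAMESPACE" wait --for=condition=complete "$job" --timeout=5m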
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:02 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:12 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:17 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestamusedmanateemyjaeger-1-d646565k5j22.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-mmmnd Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-collector-5489f5bd9b-mmmnd to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-mmmnd AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-mmmnd.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-mmmnd.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-mmmnd.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-mmmnd replicaset-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-query-5b547bfcbc-rpq2g to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Warning Pod my-jaeger-query-5b547bfcbc-rpq2g FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : secret "my-jaeger-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5b547bfcbc SuccessfulCreate Created pod: my-jaeger-query-5b547bfcbc-rpq2g replicaset-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:28 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5b547bfcbc to 1 deployment-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:29 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:35 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:35 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:35 +0000 UTC Normal Pod my-jaeger-query-5b547bfcbc-rpq2g.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:35 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5b547bfcbc SuccessfulDelete Deleted pod: my-jaeger-query-5b547bfcbc-rpq2g replicaset-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:35 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-5b547bfcbc to 0 from 1 deployment-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:36 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-query-75f776fc5b-zhhlk to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:36 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk AddedInterface Add eth0 [10.131.0.28/23] from ovn-kubernetes
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:36 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-75f776fc5b SuccessfulCreate Created pod: my-jaeger-query-75f776fc5b-zhhlk replicaset-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:36 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-75f776fc5b to 1 deployment-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:37 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:37 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:37 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:37 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:37 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:37 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:37 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:37 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:40 +0000 UTC Normal Pod 00-report-span-wgz7v Binding Scheduled Successfully assigned kuttl-test-amused-manatee/00-report-span-wgz7v to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:40 +0000 UTC Normal Pod 00-report-span-wgz7v AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:40 +0000 UTC Normal Pod 00-report-span-wgz7v.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:40 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-wgz7v job-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:41 +0000 UTC Normal Pod 00-report-span-wgz7v.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" in 1.431s (1.431s including waiting) kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:42 +0000 UTC Normal Pod 00-report-span-wgz7v.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:42 +0000 UTC Normal Pod 00-report-span-wgz7v.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:28:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:05 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:06 +0000 UTC Normal Pod 00-check-indices-n2gqs Binding Scheduled Successfully assigned kuttl-test-amused-manatee/00-check-indices-n2gqs to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:06 +0000 UTC Normal Pod 00-check-indices-n2gqs AddedInterface Add eth0 [10.128.2.30/23] from ovn-kubernetes
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:06 +0000 UTC Normal Pod 00-check-indices-n2gqs.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:06 +0000 UTC Normal Pod 00-check-indices-n2gqs.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:06 +0000 UTC Normal Pod 00-check-indices-n2gqs.spec.containers{asserts-container} Started Started container asserts-container kubelet
00-check-indices-n2gqs job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:09 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:10 +0000 UTC Normal Pod 01-check-indices-z94cn Binding Scheduled Successfully assigned kuttl-test-amused-manatee/01-check-indices-z94cn to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:10 +0000 UTC Normal Pod 01-check-indices-z94cn AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:10 +0000 UTC Normal Pod 01-check-indices-z94cn.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:10 +0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-z94cn job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:11 +0000 UTC Normal Pod 01-check-indices-z94cn.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:11 +0000 UTC Normal Pod 01-check-indices-z94cn.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:13 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-5489f5bd9b-mmmnd horizontal-pod-autoscaler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:15 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-wzv4w Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-es-rollover-create-mapping-wzv4w to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:15 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-wzv4w AddedInterface Add eth0 [10.128.2.32/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:15 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-wzv4w.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:15 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-wzv4w job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:21 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-wzv4w.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" in 5.691s (5.691s including waiting) kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:21 +0000 UTC Normal Pod 
my-jaeger-es-rollover-create-mapping-wzv4w.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:21 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-wzv4w.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:24 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-mmmnd.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:24 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulDelete Deleted pod: my-jaeger-collector-5489f5bd9b-mmmnd replicaset-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:24 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-5489f5bd9b to 0 from 1 deployment-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:24 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:24 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:24 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:24 +0000 UTC Normal Pod my-jaeger-query-75f776fc5b-zhhlk.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:24 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-75f776fc5b SuccessfulDelete Deleted pod: my-jaeger-query-75f776fc5b-zhhlk replicaset-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:24 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-75f776fc5b to 0 from 1 deployment-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-cdkm7 Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-collector-7794fb6d5c-cdkm7 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-cdkm7 AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-cdkm7.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-cdkm7.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-cdkm7.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:31:18 | 
es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7794fb6d5c SuccessfulCreate Created pod: my-jaeger-collector-7794fb6d5c-cdkm7 replicaset-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-7794fb6d5c to 1 deployment-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898 Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-query-69d48d4489-km898 to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-69d48d4489 SuccessfulCreate Created pod: my-jaeger-query-69d48d4489-km898 replicaset-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:25 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-69d48d4489 to 1 deployment-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898 AddedInterface Add eth0 [10.131.0.29/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:26 +0000 UTC Normal Pod my-jaeger-query-69d48d4489-km898.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:32 +0000 UTC Normal 
Pod 02-report-span-zlz7p Binding Scheduled Successfully assigned kuttl-test-amused-manatee/02-report-span-zlz7p to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:32 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-zlz7p job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:33 +0000 UTC Normal Pod 02-report-span-zlz7p AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:33 +0000 UTC Normal Pod 02-report-span-zlz7p.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:33 +0000 UTC Normal Pod 02-report-span-zlz7p.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:33 +0000 UTC Normal Pod 02-report-span-zlz7p.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:55 +0000 UTC Normal Pod 02-check-indices-j28gl Binding Scheduled Successfully assigned kuttl-test-amused-manatee/02-check-indices-j28gl to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:55 +0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-j28gl job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:55 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:56 +0000 UTC Normal Pod 02-check-indices-j28gl AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:56 +0000 UTC Normal Pod 02-check-indices-j28gl.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:56 +0000 UTC Normal Pod 02-check-indices-j28gl.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:56 +0000 UTC Normal Pod 02-check-indices-j28gl.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:59 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:59 +0000 UTC Normal Pod 03-check-indices-s6cp8 Binding Scheduled Successfully assigned kuttl-test-amused-manatee/03-check-indices-s6cp8 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:29:59 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-s6cp8 job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod 03-check-indices-s6cp8 AddedInterface Add eth0 [10.128.2.36/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 
UTC Normal Pod 03-check-indices-s6cp8.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod 03-check-indices-s6cp8.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod 03-check-indices-s6cp8.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320990-wcss2 Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-es-lookback-28320990-wcss2 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320990-wcss2 AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320990-wcss2.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320990-wcss2.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320990-wcss2.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28320990 SuccessfulCreate Created pod: my-jaeger-es-lookback-28320990-wcss2 job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28320990 cronjob-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320990-jzt8f Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-es-rollover-28320990-jzt8f to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320990-jzt8f AddedInterface Add eth0 [10.128.2.37/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320990-jzt8f.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320990-jzt8f.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320990-jzt8f.spec.containers{my-jaeger-es-rollover} Started Started container 
my-jaeger-es-rollover kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28320990 SuccessfulCreate Created pod: my-jaeger-es-rollover-28320990-jzt8f job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28320990 cronjob-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:03 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:03 +0000 UTC Normal Pod 04-check-indices-6lhq6 Binding Scheduled Successfully assigned kuttl-test-amused-manatee/04-check-indices-6lhq6 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:03 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-6lhq6 job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28320990 Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28320990, status: Complete cronjob-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28320990 Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28320990, status: Complete cronjob-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:04 +0000 UTC Normal Pod 04-check-indices-6lhq6 AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:04 +0000 UTC Normal Pod 04-check-indices-6lhq6.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:04 +0000 UTC Normal Pod 04-check-indices-6lhq6.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:04 +0000 UTC Normal Pod 04-check-indices-6lhq6.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:07 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:16 +0000 UTC Normal Pod 03-report-span-qlrvf Binding Scheduled Successfully assigned kuttl-test-amused-manatee/03-report-span-qlrvf to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:16 +0000 UTC Normal Pod 03-report-span-qlrvf AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:16 +0000 UTC Normal Pod 03-report-span-qlrvf.spec.containers{asserts-container} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:16 +0000 UTC Normal Pod 03-report-span-qlrvf.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:16 +0000 UTC Normal Pod 03-report-span-qlrvf.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:16 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-qlrvf job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-7794fb6d5c-cdkm7 horizontal-pod-autoscaler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:30:39 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320991-t9sf5 Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-es-lookback-28320991-t9sf5 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320991-t9sf5 AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320991-t9sf5.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320991-t9sf5.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28320991-t9sf5.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28320991 SuccessfulCreate Created pod: my-jaeger-es-lookback-28320991-t9sf5 job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28320991 cronjob-controller 
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320991-5wxqk Binding Scheduled Successfully assigned kuttl-test-amused-manatee/my-jaeger-es-rollover-28320991-5wxqk to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320991-5wxqk AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320991-5wxqk.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320991-5wxqk.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28320991-5wxqk.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28320991 SuccessfulCreate Created pod: my-jaeger-es-rollover-28320991-5wxqk job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28320991 cronjob-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28320991 Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28320991, status: Complete cronjob-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28320991 Completed Job completed job-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28320991, status: Complete cronjob-controller logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:10 +0000 UTC Normal Pod 05-check-indices-mgvrc Binding Scheduled Successfully assigned kuttl-test-amused-manatee/05-check-indices-mgvrc to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:10 +0000 UTC Normal Pod 05-check-indices-mgvrc AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:10 +0000 UTC Normal Pod 05-check-indices-mgvrc.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:10 +0000 UTC Normal Pod 05-check-indices-mgvrc.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:10 +0000 UTC Normal Pod 05-check-indices-mgvrc.spec.containers{asserts-container} Started 
Started container asserts-container kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:10 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-mgvrc job-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:13 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:14 +0000 UTC Normal Pod 06-check-indices-cfzpv Binding Scheduled Successfully assigned kuttl-test-amused-manatee/06-check-indices-cfzpv to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:14 +0000 UTC Normal Pod 06-check-indices-cfzpv AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:14 +0000 UTC Normal Pod 06-check-indices-cfzpv.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:14 +0000 UTC Normal Pod 06-check-indices-cfzpv.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:14 +0000 UTC Normal Pod 06-check-indices-cfzpv.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:14 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-cfzpv job-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | 2023-11-06 08:31:17 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller
logger.go:42: 08:31:18 | es-rollover-autoprov | Deleting namespace: kuttl-test-amused-manatee
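A note on the my-jaeger-es-lookback-28320990 / -28320991 job names above: the CronJob controller suffixes each Job with its scheduled time expressed in minutes since the Unix epoch, so the suffixes can be mapped back to the wall-clock times in this log. A quick check with GNU date (-d @N reads N as epoch seconds):

    date -u -d "@$((28320990 * 60))"   # Mon Nov  6 08:30:00 UTC 2023, the first run logged above
    date -u -d "@$((28320991 * 60))"   # Mon Nov  6 08:31:00 UTC 2023, the run one minute later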
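The recurring HorizontalPodAutoscaler warnings in this test (FailedGetResourceMetric, FailedComputeMetricsReplicas) spell out their own cause: the jaeger-collector container declares no CPU or memory requests ("missing request for memory in container jaeger-collector"), so utilization percentages cannot be computed. A minimal sketch of the kind of change that would address this, assuming the Jaeger CR exposes the standard Kubernetes ResourceRequirements block under spec.collector.resources (the CR name and values here are illustrative, not taken from this run):

    # Hypothetical: give the collector container resource requests so the HPA
    # has a baseline for cpu/memory utilization.
    kubectl patch jaegers my-jaeger -n "$NAMESPACE" --type=merge \
      -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'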
=== CONT kuttl/harness/es-increasing-replicas
logger.go:42: 08:31:25 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:31:25 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:31:25 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:31:25 | es-increasing-replicas | Creating namespace: kuttl-test-key-dodo
logger.go:42: 08:31:25 | es-increasing-replicas/1-install | starting test step 1-install
logger.go:42: 08:31:25 | es-increasing-replicas/1-install | Jaeger:kuttl-test-key-dodo/simple-prod created
logger.go:42: 08:32:00 | es-increasing-replicas/1-install | test step completed 1-install
logger.go:42: 08:32:00 | es-increasing-replicas/2-install | starting test step 2-install
logger.go:42: 08:32:00 | es-increasing-replicas/2-install | Jaeger:kuttl-test-key-dodo/simple-prod updated
logger.go:42: 08:32:10 | es-increasing-replicas/2-install | test step completed 2-install
logger.go:42: 08:32:10 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test
logger.go:42: 08:32:10 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 08:32:12 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:32:18 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:32:18 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:32:18 | es-increasing-replicas/3-smoke-test | job.batch/report-span created
logger.go:42: 08:32:19 | es-increasing-replicas/3-smoke-test | job.batch/check-span created
logger.go:42: 08:32:24 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test
logger.go:42: 08:32:24 | es-increasing-replicas/4-install | starting test step 4-install
logger.go:42: 08:32:24 | es-increasing-replicas/4-install | Jaeger:kuttl-test-key-dodo/simple-prod updated
logger.go:42: 08:32:24 | es-increasing-replicas/4-install | test step completed 4-install
logger.go:42: 08:32:24 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes
logger.go:42: 08:32:24 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE]
logger.go:42: 08:32:24 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 08:32:24 | es-increasing-replicas/5-check-es-nodes | false
logger.go:42: 08:32:24 | es-increasing-replicas/5-check-es-nodes | Error: no matches found
logger.go:42: 08:32:29 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 08:32:29 | es-increasing-replicas/5-check-es-nodes | true
logger.go:42: 08:32:29 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes
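The "Ignoring ..." messages at the start of this test show kuttl's step-file filter: only files matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ become numbered test steps, so helper scripts and docs in the test directory are skipped. A quick illustration (GNU grep with PCRE support; the file names here are invented):

    printf '%s\n' 00-install.yaml 01-assert.yaml 1-report-span README.md check-es-nodes.sh \
      | grep -P '^(\d+)-(?:[^\.]+)(?:\.yaml)?$'
    # prints 00-install.yaml, 01-assert.yaml and 1-report-span; the rest are ignored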
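The 3-smoke-test step above follows the suite's standard pattern: fetch a token for the e2e-test service account via get-token.sh, render the smoke-test Job template with gomplate, and apply the result, which creates the report-span and check-span Jobs seen completing in the events below. Condensed from the commands logged above ($NAMESPACE is the kuttl test namespace):

    # Endpoints, secret name and template path are the values visible in this log.
    export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
    export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
    export MOUNT_SECRET=e2e-test
    /tmp/jaeger-tests/bin/gomplate \
      -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template \
      -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"   # creates report-span and check-span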
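The 5-check-es-nodes step polls until the operator has actually scaled Elasticsearch: the first probe fails (false, then "Error: no matches found", presumably because the second node's resources do not exist yet) and the retry five seconds later succeeds. The script itself is not reproduced in this log; the following is a hypothetical reconstruction of the pattern, with the Deployment-name filter and expected count assumed:

    #!/bin/bash
    # Hypothetical sketch of check-es-nodes.sh: poll until the expected number
    # of per-node elasticsearch-cdm-* Deployments exists in namespace $1.
    namespace=$1
    expected=2
    while true; do
        echo "Checking if the number of ES instances is the expected"
        found=$(kubectl get deployments -n "$namespace" -o name | grep -c elasticsearch-cdm)
        if [ "$found" -eq "$expected" ]; then
            echo true
            break
        fi
        echo false
        sleep 5
    done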
logger.go:42: 08:32:29 | es-increasing-replicas | es-increasing-replicas events from ns kuttl-test-key-dodo:
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df Binding Scheduled Successfully assigned kuttl-test-key-dodo/elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df AddedInterface Add eth0 [10.128.2.46/23] from ovn-kubernetes
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:31 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestkeydodosimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4 to 1 deployment-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:41 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:46 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-1-76db67bdc4-ss2df.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:57 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6dn27 Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-collector-77fcbdc546-6dn27 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:57 +0000 UTC Warning Pod simple-prod-collector-77fcbdc546-6dn27 FailedMount MountVolume.SetUp failed for volume "simple-prod-collector-tls-config-volume" : secret "simple-prod-collector-headless-tls" not found kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:57 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulCreate Created pod: simple-prod-collector-77fcbdc546-6dn27 replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:57 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-77fcbdc546 to 1 deployment-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:57 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-query-867bf8c689-rfdjj to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:57 +0000 UTC Warning Pod simple-prod-query-867bf8c689-rfdjj
FailedMount MountVolume.SetUp failed for volume "simple-prod-ui-oauth-proxy-tls" : secret "simple-prod-ui-oauth-proxy-tls" not found kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:57 +0000 UTC Normal ReplicaSet.apps simple-prod-query-867bf8c689 SuccessfulCreate Created pod: simple-prod-query-867bf8c689-rfdjj replicaset-controller logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:57 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-867bf8c689 to 1 deployment-controller logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6dn27 AddedInterface Add eth0 [10.131.0.30/23] from ovn-kubernetes logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6dn27.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6dn27.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6dn27.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 
08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:31:58 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:03 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-j9l2m Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-collector-77fcbdc546-j9l2m to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:03 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulCreate Created pod: simple-prod-collector-77fcbdc546-j9l2m replicaset-controller logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:03 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-77fcbdc546 to 2 from 1 deployment-controller logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:03 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-query-867bf8c689-jn45r to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:03 +0000 UTC Normal ReplicaSet.apps simple-prod-query-867bf8c689 SuccessfulCreate Created pod: simple-prod-query-867bf8c689-jn45r replicaset-controller logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:03 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-867bf8c689 to 2 from 1 deployment-controller logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:04 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-j9l2m AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:04 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-j9l2m.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:04 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:04 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:06 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" in 2.118s (2.118s including waiting) kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:06 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:06 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-query} 
Started Started container jaeger-query kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:06 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:06 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:06 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:06 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:07 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-j9l2m.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" in 2.673s (2.673s including waiting) kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:07 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-j9l2m.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:07 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-j9l2m.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:08 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" in 1.66s (1.66s including waiting) kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:08 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:08 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:13 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:13 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:13 +0000 UTC Normal Pod simple-prod-query-867bf8c689-jn45r.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:13 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:13 +0000 UTC Normal Pod 
simple-prod-query-867bf8c689-rfdjj.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:13 +0000 UTC Normal Pod simple-prod-query-867bf8c689-rfdjj.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-867bf8c689 SuccessfulDelete Deleted pod: simple-prod-query-867bf8c689-rfdjj replicaset-controller logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-867bf8c689 SuccessfulDelete Deleted pod: simple-prod-query-867bf8c689-jn45r replicaset-controller logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:13 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-867bf8c689 to 0 from 2 deployment-controller logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-query-6dbc7f469f-7dt8c to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod 
simple-prod-query-6dbc7f469f-7dt8c.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-query-6dbc7f469f-tsjdm to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6dbc7f469f SuccessfulCreate Created pod: simple-prod-query-6dbc7f469f-tsjdm replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6dbc7f469f SuccessfulCreate Created pod: simple-prod-query-6dbc7f469f-7dt8c replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:14 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6dbc7f469f to 2 deployment-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Pod check-span-gbzwb Binding Scheduled Successfully assigned kuttl-test-key-dodo/check-span-gbzwb to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Pod check-span-gbzwb AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Pod check-span-gbzwb.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-gbzwb job-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Pod report-span-cb8jt Binding Scheduled Successfully assigned kuttl-test-key-dodo/report-span-cb8jt to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Pod report-span-cb8jt AddedInterface Add eth0 [10.129.2.29/23] from ovn-kubernetes
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Pod report-span-cb8jt.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Pod report-span-cb8jt.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Pod report-span-cb8jt.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:19 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-cb8jt job-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:20 +0000 UTC Normal Pod check-span-gbzwb.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" in 1.339s (1.339s including waiting) kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:20 +0000 UTC Normal Pod check-span-gbzwb.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:20 +0000 UTC Normal Pod check-span-gbzwb.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:24 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8 Binding Scheduled Successfully assigned kuttl-test-key-dodo/elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8 AddedInterface Add eth0 [10.129.2.30/23] from ovn-kubernetes
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8 replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestkeydodosimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6 to 1 deployment-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6dn27.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-j9l2m.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulDelete Deleted pod: simple-prod-collector-77fcbdc546-6dn27 replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulDelete Deleted pod: simple-prod-collector-77fcbdc546-j9l2m replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-77fcbdc546 to 0 from 2 deployment-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-7dt8c.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Pod simple-prod-query-6dbc7f469f-tsjdm.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6dbc7f469f SuccessfulDelete Deleted pod: simple-prod-query-6dbc7f469f-tsjdm replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6dbc7f469f SuccessfulDelete Deleted pod: simple-prod-query-6dbc7f469f-7dt8c replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:25 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-6dbc7f469f to 0 from 2 deployment-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestkeydodosimpleprod-2-7b59dd68d6-qhct8.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-79ll9 Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-collector-5db88495b5-79ll9 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-79ll9 AddedInterface Add eth0 [10.128.2.47/23] from ovn-kubernetes
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-79ll9.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-79ll9.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-79ll9.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-xfvp6 Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-collector-5db88495b5-xfvp6 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-xfvp6 AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-xfvp6.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-xfvp6.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-xfvp6.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5db88495b5 SuccessfulCreate Created pod: simple-prod-collector-5db88495b5-xfvp6 replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5db88495b5 SuccessfulCreate Created pod: simple-prod-collector-5db88495b5-79ll9 replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5db88495b5 to 2 deployment-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-query-6c6ff7f5d8-vxppl to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-vxppl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt Binding Scheduled Successfully assigned kuttl-test-key-dodo/simple-prod-query-6c6ff7f5d8-zpqqt to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6c6ff7f5d8 SuccessfulCreate Created pod: simple-prod-query-6c6ff7f5d8-vxppl replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6c6ff7f5d8 SuccessfulCreate Created pod: simple-prod-query-6c6ff7f5d8-zpqqt replicaset-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:26 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6c6ff7f5d8 to 2 deployment-controller
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:27 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:27 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:27 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:27 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | 2023-11-06 08:32:27 +0000 UTC Normal Pod simple-prod-query-6c6ff7f5d8-zpqqt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:32:29 | es-increasing-replicas | Deleting namespace: kuttl-test-key-dodo
=== CONT kuttl/harness/es-index-cleaner-autoprov
logger.go:42: 08:33:03 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:33:03 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-organic-monkfish
logger.go:42: 08:33:03 | es-index-cleaner-autoprov/1-install | starting test step 1-install
logger.go:42: 08:33:03 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-organic-monkfish/test-es-index-cleaner-with-prefix created
logger.go:42: 08:33:40 | es-index-cleaner-autoprov/1-install | test step completed 1-install
logger.go:42: 08:33:40 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 08:33:40 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE test-es-index-cleaner-with-prefix /dev/null]
logger.go:42: 08:33:41 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:33:47 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 08:33:48 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 08:33:48 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 08:34:27 | es-index-cleaner-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 08:34:27 | es-index-cleaner-autoprov/3-install | starting test step 3-install
logger.go:42: 08:34:27 | es-index-cleaner-autoprov/3-install | Jaeger:kuttl-test-organic-monkfish/test-es-index-cleaner-with-prefix updated
logger.go:42: 08:34:27 | es-index-cleaner-autoprov/3-install | test step completed 3-install
logger.go:42: 08:34:27 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | starting test step 4-wait-es-index-cleaner
logger.go:42: 08:34:27 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob test-es-index-cleaner-with-prefix-es-index-cleaner --namespace $NAMESPACE]
logger.go:42: 08:34:28 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:28Z" level=debug msg="Checking if the test-es-index-cleaner-with-prefix-es-index-cleaner CronJob exists"
logger.go:42: 08:34:28 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:28Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 08:34:28 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:28Z" level=warning msg="The BatchV1/Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner was not found"
logger.go:42: 08:34:28 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:28Z" level=debug msg="Found BatchV/Cronjobs:"
logger.go:42: 08:34:28 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:28Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 08:34:28 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:28Z" level=warning msg="The BatchV1/Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner was not found"
logger.go:42: 08:34:28 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:28Z" level=debug msg="Found BatchV/Cronjobs:"
logger.go:42: 08:34:38 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:38Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 08:34:38 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:38Z" level=info msg="Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner found successfully"
logger.go:42: 08:34:38 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:38Z" level=debug msg="Waiting for the next scheduled job from test-es-index-cleaner-with-prefix-es-index-cleaner cronjob"
logger.go:42: 08:34:38 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:38Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 08:34:38 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:38Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 08:34:48 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:48Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 08:34:58 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:34:58Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 08:35:08 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-11-06T08:35:08Z" level=info msg="Job of owner test-es-index-cleaner-with-prefix-es-index-cleaner succeeded after test-es-index-cleaner-with-prefix-es-index-cleaner 30.021100334s"
logger.go:42: 08:35:08 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | test step completed 4-wait-es-index-cleaner
logger.go:42: 08:35:08 | es-index-cleaner-autoprov/5-install | starting test step 5-install
logger.go:42: 08:35:08 | es-index-cleaner-autoprov/5-install | Jaeger:kuttl-test-organic-monkfish/test-es-index-cleaner-with-prefix updated
logger.go:42: 08:35:08 | es-index-cleaner-autoprov/5-install | test step completed 5-install
logger.go:42: 08:35:08 | es-index-cleaner-autoprov/6-check-indices | starting test step 6-check-indices
logger.go:42: 08:35:08 | es-index-cleaner-autoprov/6-check-indices | Job:kuttl-test-organic-monkfish/00-check-indices created
logger.go:42: 08:35:11 | es-index-cleaner-autoprov/6-check-indices | test step completed 6-check-indices
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-organic-monkfish:
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:09 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7db44ff644 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2 replicaset-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2 Binding Scheduled Successfully assigned kuttl-test-organic-monkfish/elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:09 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7db44ff644 to 1 deployment-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2 AddedInterface Add eth0 [10.128.2.48/23] from ovn-kubernetes
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:20 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:25 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestorganicmonkfishtestesindexc-1-7tqzr2.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:36 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-5dd52 Binding Scheduled Successfully assigned kuttl-test-organic-monkfish/test-es-index-cleaner-with-prefix-collector-7f88446db8-5dd52 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:36 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-7f88446db8 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-7f88446db8-5dd52 replicaset-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:36 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-7f88446db8 to 1 deployment-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:36 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b Binding Scheduled Successfully assigned kuttl-test-organic-monkfish/test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:36 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-767f97f978 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b replicaset-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:36 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-767f97f978 to 1 deployment-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-5dd52 AddedInterface Add eth0 [10.129.2.32/23] from ovn-kubernetes
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-5dd52.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-5dd52.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-5dd52.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b AddedInterface Add eth0 [10.131.0.36/23] from ovn-kubernetes
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:37 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:43 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-767f97f978 SuccessfulDelete Deleted pod: test-es-index-cleaner-with-prefix-query-767f97f978-xmm5b replicaset-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:43 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled down replica set test-es-index-cleaner-with-prefix-query-767f97f978 to 0 from 1 deployment-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5 Binding Scheduled Successfully assigned kuttl-test-organic-monkfish/test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:44 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-67f4c7dc4c SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5 replicaset-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:44 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-67f4c7dc4c to 1 deployment-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5 AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:45 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-67f4c7dc4c-wvlq5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:48 +0000 UTC Normal Pod 00-report-span-krmjm Binding Scheduled Successfully assigned kuttl-test-organic-monkfish/00-report-span-krmjm to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:48 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-krmjm job-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:49 +0000 UTC Normal Pod 00-report-span-krmjm AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:49 +0000 UTC Normal Pod 00-report-span-krmjm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:49 +0000 UTC Normal Pod 00-report-span-krmjm.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:49 +0000 UTC Normal Pod 00-report-span-krmjm.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:52 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:33:52 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:34:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:34:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-5dd52 horizontal-pod-autoscaler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:34:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:34:27 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28320995 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-2832099cmsfp job-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2832099cmsfp Binding Scheduled Successfully assigned kuttl-test-organic-monkfish/test-es-index-cleaner-with-prefix-es-index-cleaner-2832099cmsfp to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2832099cmsfp AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2832099cmsfp.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:08ca2463363916637592e6c1cc1731784e07860269292b216db3e6fd0eb44382" kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-28320995 cronjob-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:03 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2832099cmsfp.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:08ca2463363916637592e6c1cc1731784e07860269292b216db3e6fd0eb44382" in 3.331s (3.331s including waiting) kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:03 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2832099cmsfp.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2832099cmsfp.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:06 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28320995 Completed Job completed job-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:06 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-28320995, status: Complete cronjob-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:08 +0000 UTC Normal Pod 00-check-indices-hxc22 Binding Scheduled Successfully assigned kuttl-test-organic-monkfish/00-check-indices-hxc22 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:08 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-hxc22 job-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:09 +0000 UTC Normal Pod 00-check-indices-hxc22 AddedInterface Add eth0 [10.129.2.35/23] from ovn-kubernetes
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:09 +0000 UTC Normal Pod 00-check-indices-hxc22.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:09 +0000 UTC Normal Pod 00-check-indices-hxc22.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:09 +0000 UTC Normal Pod 00-check-indices-hxc22.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | 2023-11-06 08:35:11 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller
logger.go:42: 08:35:11 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-organic-monkfish
=== CONT kuttl/harness/es-from-aio-to-production
logger.go:42: 08:35:18 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:35:18 | es-from-aio-to-production | Creating namespace: kuttl-test-assuring-koi
logger.go:42: 08:35:18 | es-from-aio-to-production/0-install | starting test step 0-install
logger.go:42: 08:35:18 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-assuring-koi/my-jaeger created
logger.go:42: 08:35:27 | es-from-aio-to-production/0-install | test step completed 0-install
logger.go:42: 08:35:27 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:35:27 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:35:28 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:35:34 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:35:35 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:35:35 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created
logger.go:42: 08:35:35 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created
logger.go:42: 08:35:46 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:35:46 | es-from-aio-to-production/3-install | starting test step 3-install
logger.go:42: 08:35:46 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-assuring-koi/my-jaeger updated
logger.go:42: 08:36:19 | es-from-aio-to-production/3-install | test step completed 3-install
logger.go:42: 08:36:19 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test
logger.go:42: 08:36:19 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:36:26 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:36:26 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:36:27 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged
logger.go:42: 08:36:27 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged
logger.go:42: 08:36:27 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test
logger.go:42: 08:36:27 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-assuring-koi:
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:21 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7 Binding Scheduled Successfully assigned kuttl-test-assuring-koi/my-jaeger-598d45c7d9-268l7 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:21 +0000 UTC Normal ReplicaSet.apps my-jaeger-598d45c7d9 SuccessfulCreate Created pod: my-jaeger-598d45c7d9-268l7 replicaset-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:21 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-598d45c7d9 to 1 deployment-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:22 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7 AddedInterface Add eth0 [10.128.2.49/23] from ovn-kubernetes
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:22 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:25 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" in 2.55s (2.55s including waiting) kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:25 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:25 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:25 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:25 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:25 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:29 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:29 +0000 UTC Normal Pod my-jaeger-598d45c7d9-268l7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:29 +0000 UTC Normal ReplicaSet.apps my-jaeger-598d45c7d9 SuccessfulDelete Deleted pod: my-jaeger-598d45c7d9-268l7 replicaset-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:29 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-598d45c7d9 to 0 from 1 deployment-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:30 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f Binding Scheduled Successfully assigned kuttl-test-assuring-koi/my-jaeger-ddb9fcf96-dcn9f to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:30 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:30 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:30 +0000 UTC Normal ReplicaSet.apps my-jaeger-ddb9fcf96 SuccessfulCreate Created pod: my-jaeger-ddb9fcf96-dcn9f replicaset-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:30 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-ddb9fcf96 to 1 deployment-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:33 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" in 2.201s (2.202s including waiting) kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:33 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:33 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:33 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:33 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:33 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:35 +0000 UTC Normal Pod check-span-h4276 Binding Scheduled Successfully assigned kuttl-test-assuring-koi/check-span-h4276 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:35 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-h4276 job-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:35 +0000 UTC Normal Pod report-span-cz8d7 Binding Scheduled Successfully assigned kuttl-test-assuring-koi/report-span-cz8d7 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:35 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-cz8d7 job-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:36 +0000 UTC Normal Pod check-span-h4276 AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:36 +0000 UTC Normal Pod check-span-h4276.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:36 +0000 UTC Normal Pod check-span-h4276.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:36 +0000 UTC Normal Pod check-span-h4276.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:36 +0000 UTC Normal Pod report-span-cz8d7 AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:36 +0000 UTC Normal Pod report-span-cz8d7.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:36 +0000 UTC Normal Pod report-span-cz8d7.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:36 +0000 UTC Normal Pod report-span-cz8d7.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:46 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:49 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db657674 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v replicaset-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v Binding Scheduled Successfully assigned kuttl-test-assuring-koi/elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:49 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestassuringkoimyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db657674 to 1 deployment-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:35:50 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:00 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:05 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestassuringkoimyjaeger-1-76db65767spp4v.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:08 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:16 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-bvzkv Binding Scheduled Successfully assigned kuttl-test-assuring-koi/my-jaeger-collector-5489f5bd9b-bvzkv to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:16 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-bvzkv replicaset-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:16 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:16 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:16 +0000 UTC Normal Pod my-jaeger-ddb9fcf96-dcn9f.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:16 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt Binding Scheduled Successfully assigned kuttl-test-assuring-koi/my-jaeger-query-796945fb45-nsfgt to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:16 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-796945fb45 SuccessfulCreate Created pod: my-jaeger-query-796945fb45-nsfgt replicaset-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:16 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-796945fb45 to 1 deployment-controller
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-bvzkv AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-bvzkv.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-bvzkv.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-bvzkv.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt AddedInterface
Add eth0 [10.131.0.40/23] from ovn-kubernetes logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:36:27 | es-from-aio-to-production | 2023-11-06 08:36:17 +0000 UTC Normal Pod my-jaeger-query-796945fb45-nsfgt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:36:27 | es-from-aio-to-production | Deleting namespace: kuttl-test-assuring-koi
=== CONT kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (863.55s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.45s)
        --- PASS: kuttl/harness/es-multiinstance (103.80s)
        --- PASS: kuttl/harness/es-streaming-autoprovisioned (229.10s)
        --- PASS: kuttl/harness/es-simple-prod (5.76s)
        --- PASS: kuttl/harness/es-rollover-autoprov (209.80s)
        --- PASS: kuttl/harness/es-increasing-replicas (98.72s)
        --- PASS: kuttl/harness/es-index-cleaner-autoprov (134.37s)
        --- PASS: kuttl/harness/es-from-aio-to-production (75.51s)
PASS
+ exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml
time="2023-11-06T08:36:36Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-06T08:36:36Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-06T08:36:36Z" level=debug msg="normalizing test case names"
time="2023-11-06T08:36:36Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts"
time="2023-11-06T08:36:36Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance"
time="2023-11-06T08:36:36Z" level=debug msg="elasticsearch/es-streaming-autoprovisioned -> elasticsearch_es_streaming_autoprovisioned"
time="2023-11-06T08:36:36Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod"
time="2023-11-06T08:36:36Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov"
time="2023-11-06T08:36:36Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas"
time="2023-11-06T08:36:36Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov"
time="2023-11-06T08:36:36Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+--------------------------------------------+--------+
|                    NAME                    | RESULT |
+--------------------------------------------+--------+
| elasticsearch_artifacts                    | passed |
| elasticsearch_es_multiinstance             | passed |
| elasticsearch_es_streaming_autoprovisioned | passed |
| elasticsearch_es_simple_prod               | passed |
| elasticsearch_es_rollover_autoprov         | passed |
| elasticsearch_es_increasing_replicas       | passed |
| elasticsearch_es_index_cleaner_autoprov    | passed |
| elasticsearch_es_from_aio_to_production    | passed |
+--------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + '[' 0 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true + '[' 3 -ne 3 ']' + test_suite_name=examples + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/examples.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-examples make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \ ./tests/e2e/examples/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 22m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 22m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-as-daemonset + cd examples-agent-as-daemonset + example_name=agent-as-daemonset + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-as-daemonset 01 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-as-daemonset.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-as-daemonset 02 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset + '[' examples-agent-as-daemonset '!=' _build ']' + cd .. + mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. 
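The strategy detection that drives the branching in the trace above reduces to two yq lookups with a fallback. A minimal sketch of it in shell, reconstructed from the visible yq calls (the actual function body in the harness may differ):

get_jaeger_strategy() {
    local deployment_file=$1
    local strategy
    # an explicit deployment strategy on the Jaeger CR wins
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
    if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
        echo "$strategy"
        return 0
    fi
    # otherwise fall back to the agent strategy: DaemonSet selects the
    # DaemonSet rendering path, anything else defaults to allInOne
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
    if [ "$strategy" != null ]; then
        echo "$strategy"
    else
        echo allInOne
    fi
}

In the traces above, both agent examples leave spec.strategy unset and set spec.agent.strategy instead, so the first lookup returns null and the fallback yields DaemonSet.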
+ mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + '[' false = true ']' + start_test examples-auto-provision-kafka + '[' 1 -ne 1 ']' + test_name=examples-auto-provision-kafka + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-auto-provision-kafka' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-auto-provision-kafka\e[0m' Rendering files for test examples-auto-provision-kafka + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. + mkdir -p examples-auto-provision-kafka + cd examples-auto-provision-kafka + example_name=auto-provision-kafka + render_install_kafka_operator 01 + '[' 1 -ne 1 ']' + test_step=01 + '[' true '!=' true ']' + render_install_example auto-provision-kafka 02 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/auto-provision-kafka.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + JAEGER_NAME=auto-provision-kafka + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=streaming ++ '[' streaming = production ']' ++ '[' streaming = streaming ']' ++ echo streaming ++ return 0 + jaeger_strategy=streaming + '[' streaming = DaemonSet ']' + '[' streaming = allInOne ']' + '[' streaming = production ']' + '[' streaming = streaming ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./02-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./02-install.yaml + mv ./02-assert.yaml ./05-assert.yaml + render_assert_kafka true auto-provision-kafka 02 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provision-kafka + test_step=02 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./02-assert.yaml ++ expr 02 + 1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./3-assert.yaml ++ expr 02 + 2 + CLUSTER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./04-assert.yaml + render_smoke_test_example auto-provision-kafka 06 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=06 + deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + jaeger_name=auto-provision-kafka + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test auto-provision-kafka true 06 + '[' 3 -ne 3 ']' + jaeger=auto-provision-kafka + is_secured=true + test_step=06 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + export JAEGER_NAME=auto-provision-kafka + JAEGER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./06-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-auto-provision-kafka + '[' examples-auto-provision-kafka '!=' _build ']' + cd .. 
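The replicas=1 decision in the auto-provision-kafka trace above comes from probing the operator deployment itself: the harness checks whether the jaeger-operator pod carries a KAFKA-PROVISIONING-MINIMAL environment variable set to true, trying each namespace the operator may have been installed into. A sketch of that probe, reconstructed from the kubectl and yq calls in the trace (the failure return value is an assumption, since only the success path is visible here):

is_kafka_minimal_enabled() {
    # the operator can live in any of these namespaces depending on how
    # it was installed
    local namespaces=(observability openshift-operators openshift-distributed-tracing)
    local i enabled
    for i in "${namespaces[@]}"; do
        enabled=$(kubectl get pods -n "$i" -l name=jaeger-operator -o yaml \
            | yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value')
        if [ "$enabled" == true ]; then
            return 0
        fi
    done
    # assumed: no operator pod advertises minimal provisioning
    return 1
}

In this run the variable is only found on the pod in openshift-distributed-tracing, so the single-replica ZooKeeper and Kafka assert templates are rendered.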
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
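Every one of these examples ends with the same smoke-test step: export the endpoints of the Jaeger instance under test, render the smoke-test and assert templates with gomplate, then clean up the environment. A sketch of that step pieced together from the traces; only the secured (OpenShift) branch is exercised in this run, so the unsecured defaults shown here, plain HTTP on query port 16686 and the non-OpenShift template path, are assumptions:

render_smoke_test() {
    local jaeger=$1 is_secured=$2 test_step=$3
    # assumed defaults for the non-OpenShift case (not visible in this run)
    local protocol=http:// query_port=:16686
    local template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
    if [ "$is_secured" = true ]; then
        # behind the oauth-proxy the query service is HTTPS on 443, and the
        # smoke test needs the OpenShift-specific template
        protocol=https://
        query_port=:443
        template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
    fi
    export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
    export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
    export JAEGER_NAME=$jaeger
    /tmp/jaeger-tests/bin/gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o "./${test_step}-assert.yaml"
    unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
}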
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
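The install step shared by all of these tests is equally mechanical: gomplate expands the upstream example into the kuttl step file, then two sed rewrites point the storage endpoints at the services the test namespace actually provides. A sketch mirroring the commands in the trace (function name as used there):

render_install_example() {
    local example_name=$1 test_step=$2
    local install_file=./${test_step}-install.yaml
    /tmp/jaeger-tests/bin/gomplate -f "/tmp/jaeger-tests/examples/${example_name}.yaml" -o "$install_file"
    # the upstream examples assume storage in the default namespace; the
    # e2e namespaces run their own elasticsearch/cassandra services
    sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' "$install_file"
    sed -i 's~cassandra.default.svc~cassandra~gi' "$install_file"
}

For production and streaming examples (simple-prod above, auto-provision-kafka earlier) the harness additionally empties spec.storage.options and has the operator self-provision a single Elasticsearch node with a 2Gi memory limit, which is what the two yq e -i edits in the trace do.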
+ mkdir -p examples-simple-prod-with-volumes + cd examples-simple-prod-with-volumes + example_name=simple-prod-with-volumes + render_install_example simple-prod-with-volumes 01 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod-with-volumes 02 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml + start_test examples-simplest + '[' 1 -ne 1 ']' + test_name=examples-simplest + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simplest' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simplest\e[0m' Rendering files for test examples-simplest + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes + '[' examples-simple-prod-with-volumes '!=' _build ']' + cd .. + mkdir -p examples-simplest + cd examples-simplest + example_name=simplest + render_install_example simplest 00 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
+ start_test examples-simplest
+ '[' 1 -ne 1 ']'
+ test_name=examples-simplest
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-simplest'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-simplest\e[0m'
Rendering files for test examples-simplest
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes
+ '[' examples-simple-prod-with-volumes '!=' _build ']'
+ cd ..
+ mkdir -p examples-simplest
+ cd examples-simplest
+ example_name=simplest
+ render_install_example simplest 00
+ '[' 2 -ne 2 ']'
+ example_name=simplest
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=simplest
++ '[' -z simplest ']'
++ echo simplest
++ return 0
+ JAEGER_NAME=simplest
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example simplest 01
+ '[' 2 -ne 2 ']'
+ example_name=simplest
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml
++ jaeger_name=simplest
++ '[' -z simplest ']'
++ echo simplest
++ return 0
+ jaeger_name=simplest
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test simplest true 01
+ '[' 3 -ne 3 ']'
+ jaeger=simplest
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simplest-query:443
+ JAEGER_QUERY_ENDPOINT=https://simplest-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268
+ export JAEGER_NAME=simplest
+ JAEGER_NAME=simplest
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
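Strategy detection, as exercised twice above, falls back through two yq lookups: .spec.strategy first (production or streaming win outright), then .spec.agent.strategy, defaulting to allInOne when both are null. A sketch under those assumptions; the non-null agent branch is never hit in this run, so its DaemonSet result is only inferred from the later '[ allInOne = DaemonSet ]' comparison:

    get_jaeger_strategy() {
        local deployment_file=$1
        local strategy
        strategy=$(yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
        # Explicit production/streaming strategies are returned as-is.
        if [ "$strategy" = "production" ] || [ "$strategy" = "streaming" ]; then
            echo "$strategy"
            return 0
        fi
        # Otherwise fall back to the agent strategy: null means all-in-one.
        strategy=$(yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
        if [ "$strategy" = "null" ]; then
            echo "allInOne"
        else
            echo "DaemonSet"  # inferred branch; not exercised anywhere in this log
        fi
        return 0
    }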
+ start_test examples-with-badger
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-badger
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-badger'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m'
Rendering files for test examples-with-badger
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest
+ '[' examples-simplest '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-badger
+ cd examples-with-badger
+ example_name=with-badger
+ render_install_example with-badger 00
+ '[' 2 -ne 2 ']'
+ example_name=with-badger
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=with-badger
++ '[' -z with-badger ']'
++ echo with-badger
++ return 0
+ JAEGER_NAME=with-badger
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example with-badger 01
+ '[' 2 -ne 2 ']'
+ example_name=with-badger
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml
++ jaeger_name=with-badger
++ '[' -z with-badger ']'
++ echo with-badger
++ return 0
+ jaeger_name=with-badger
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-badger true 01
+ '[' 3 -ne 3 ']'
+ jaeger=with-badger
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-badger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268
+ export JAEGER_NAME=with-badger
+ JAEGER_NAME=with-badger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
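Every secured smoke test above follows the same pattern: derive the query and collector endpoints from the Jaeger name, export them, and let gomplate substitute them into the OpenShift smoke-test template. A condensed sketch of render_smoke_test reconstructed from the trace (the insecure branch's port and template are assumptions; this run only exercises the secured path):

    render_smoke_test() {
        local jaeger=$1 is_secured=$2 test_step=$3
        local protocol query_port template
        if [ "$is_secured" = "true" ]; then
            protocol=https:// ; query_port=:443
            template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
        else
            protocol=http:// ; query_port=:16686   # assumed insecure defaults, not shown in this log
            template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
        fi
        # gomplate reads these from the environment while rendering the template.
        export JAEGER_QUERY_ENDPOINT="${protocol}${jaeger}-query${query_port}"
        export JAEGER_COLLECTOR_ENDPOINT="http://${jaeger}-collector-headless:14268"
        export JAEGER_NAME="$jaeger"
        gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
        gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o "./${test_step}-assert.yaml"
        unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }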
+ start_test examples-with-badger-and-volume
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-badger-and-volume
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-badger-and-volume'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m'
Rendering files for test examples-with-badger-and-volume
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger
+ '[' examples-with-badger '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-badger-and-volume
+ cd examples-with-badger-and-volume
+ example_name=with-badger-and-volume
+ render_install_example with-badger-and-volume 00
+ '[' 2 -ne 2 ']'
+ example_name=with-badger-and-volume
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=with-badger-and-volume
++ '[' -z with-badger-and-volume ']'
++ echo with-badger-and-volume
++ return 0
+ JAEGER_NAME=with-badger-and-volume
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example with-badger-and-volume 01
+ '[' 2 -ne 2 ']'
+ example_name=with-badger-and-volume
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml
++ jaeger_name=with-badger-and-volume
++ '[' -z with-badger-and-volume ']'
++ echo with-badger-and-volume
++ return 0
+ jaeger_name=with-badger-and-volume
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-badger-and-volume true 01
+ '[' 3 -ne 3 ']'
+ jaeger=with-badger-and-volume
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268
+ export JAEGER_NAME=with-badger-and-volume
+ JAEGER_NAME=with-badger-and-volume
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
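The two sed substitutions that recur in every render_install_example call above normalize the storage hostnames: the stock example manifests point at services in the default namespace, while the e2e runs create Elasticsearch and Cassandra inside the test namespace itself, so the .default.svc suffix has to go. Verbatim from the trace:

    sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
    sed -i 's~cassandra.default.svc~cassandra~gi' ./00-install.yaml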
+ start_test examples-with-cassandra
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-cassandra
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-cassandra'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m'
Rendering files for test examples-with-cassandra
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume
+ '[' examples-with-badger-and-volume '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-cassandra
+ cd examples-with-cassandra
+ example_name=with-cassandra
+ render_install_cassandra 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml
+ render_install_example with-cassandra 01
+ '[' 2 -ne 2 ']'
+ example_name=with-cassandra
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=with-cassandra
++ '[' -z with-cassandra ']'
++ echo with-cassandra
++ return 0
+ JAEGER_NAME=with-cassandra
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=allInOne
++ '[' allInOne = production ']'
++ '[' allInOne = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_smoke_test_example with-cassandra 02
+ '[' 2 -ne 2 ']'
+ example_name=with-cassandra
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml
++ jaeger_name=with-cassandra
++ '[' -z with-cassandra ']'
++ echo with-cassandra
++ return 0
+ jaeger_name=with-cassandra
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-cassandra true 02
+ '[' 3 -ne 3 ']'
+ jaeger=with-cassandra
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268
+ export JAEGER_NAME=with-cassandra
+ JAEGER_NAME=with-cassandra
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-with-sampling
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-sampling
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-sampling'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m'
Rendering files for test examples-with-sampling
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra
+ '[' examples-with-cassandra '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-sampling
+ cd examples-with-sampling
+ export example_name=with-sampling
+ example_name=with-sampling
+ render_install_cassandra 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml
+ render_install_example with-sampling 01
+ '[' 2 -ne 2 ']'
+ example_name=with-sampling
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=with-sampling
++ '[' -z with-sampling ']'
++ echo with-sampling
++ return 0
+ JAEGER_NAME=with-sampling
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=allInOne
++ '[' allInOne = production ']'
++ '[' allInOne = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_smoke_test_example with-sampling 02
+ '[' 2 -ne 2 ']'
+ example_name=with-sampling
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml
++ jaeger_name=with-sampling
++ '[' -z with-sampling ']'
++ echo with-sampling
++ return 0
+ jaeger_name=with-sampling
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-sampling true 02
+ '[' 3 -ne 3 ']'
+ jaeger=with-sampling
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268
+ export JAEGER_NAME=with-sampling
+ JAEGER_NAME=with-sampling
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ start_test examples-openshift-agent-as-daemonset
+ '[' 1 -ne 1 ']'
+ test_name=examples-openshift-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-openshift-agent-as-daemonset'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-openshift-agent-as-daemonset\e[0m'
Rendering files for test examples-openshift-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling
+ '[' examples-with-sampling '!=' _build ']'
+ cd ..
+ mkdir -p examples-openshift-agent-as-daemonset
+ cd examples-openshift-agent-as-daemonset
+ prepare_daemonset 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ '[' true = true ']'
+ cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml
+ echo ---
+ cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml
+ JAEGER_NAME=agent-as-daemonset
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml
+ render_install_vertx 03
+ '[' 1 -ne 1 ']'
+ test_step=03
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./03-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].env=[{"name": "JAEGER_AGENT_HOST", "valueFrom": {"fieldRef": {"apiVersion": "v1", "fieldPath": "status.hostIP"}}}]' ./03-install.yaml
+ render_find_service agent-as-daemonset production order 00 04
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-daemonset
+ deployment_strategy=production
+ service_name=order
+ job_number=00
+ test_step=04
+ export JAEGER_NAME=agent-as-daemonset
+ JAEGER_NAME=agent-as-daemonset
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' production '!=' allInOne ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template -o ./04-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
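The yq edit in the Vert.x step above injects a downward-API environment variable so the business application finds the node-local agent DaemonSet via the host IP rather than a sidecar. The env entry it adds to 03-install.yaml renders as (a mechanical YAML form of the JSON payload shown in the trace):

    spec:
      template:
        spec:
          containers:
            - env:
                - name: JAEGER_AGENT_HOST
                  valueFrom:
                    fieldRef:
                      apiVersion: v1
                      fieldPath: status.hostIP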
+ '[' true = true ']'
+ start_test examples-openshift-with-htpasswd
+ '[' 1 -ne 1 ']'
+ test_name=examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-openshift-with-htpasswd'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m'
Rendering files for test examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-agent-as-daemonset
+ '[' examples-openshift-agent-as-daemonset '!=' _build ']'
+ cd ..
+ mkdir -p examples-openshift-with-htpasswd
+ cd examples-openshift-with-htpasswd
+ export JAEGER_NAME=with-htpasswd
+ JAEGER_NAME=with-htpasswd
+ export JAEGER_USERNAME=awesomeuser
+ JAEGER_USERNAME=awesomeuser
+ export JAEGER_PASSWORD=awesomepassword
+ JAEGER_PASSWORD=awesomepassword
+ export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
+ JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ base64
+ SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg==
+ /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ INSECURE=true
+ JAEGER_USERNAME=
+ JAEGER_PASSWORD=
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml
+ JAEGER_USERNAME=wronguser
+ JAEGER_PASSWORD=wrongpassword
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml
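The htpasswd fixture above is a standard Apache {SHA} entry (base64 of the password's SHA-1 digest), base64-encoded once more for the Secret consumed by 00-install.yaml.template. A hand-run equivalent of the fixture and of the three generated assert-http-code checks; the curl flags are assumptions, since the template's internals are not shown in this log:

    # Produce a user:{SHA}... entry like JAEGER_USER_PASSWORD_HASH above.
    htpasswd -nbs awesomeuser awesomepassword
    # Base64-encode it for the Secret, exactly as the trace does.
    echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' | base64

    # The three route checks: unauthenticated, wrong credentials, correct credentials.
    URL="https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE")/search"
    curl -k -s -o /dev/null -w '%{http_code}\n' "$URL"                                 # expect 403
    curl -k -s -o /dev/null -w '%{http_code}\n' -u wronguser:wrongpassword "$URL"      # expect 403
    curl -k -s -o /dev/null -w '%{http_code}\n' -u awesomeuser:awesomepassword "$URL"  # expect 200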
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running examples E2E tests'
Running examples E2E tests
+ cd tests/e2e/examples/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-1891468343
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 17 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/examples-agent-as-daemonset
=== PAUSE kuttl/harness/examples-agent-as-daemonset
=== RUN kuttl/harness/examples-agent-with-priority-class
=== PAUSE kuttl/harness/examples-agent-with-priority-class
=== RUN kuttl/harness/examples-all-in-one-with-options
=== PAUSE kuttl/harness/examples-all-in-one-with-options
=== RUN kuttl/harness/examples-auto-provision-kafka
=== PAUSE kuttl/harness/examples-auto-provision-kafka
=== RUN kuttl/harness/examples-business-application-injected-sidecar
=== PAUSE kuttl/harness/examples-business-application-injected-sidecar
=== RUN kuttl/harness/examples-collector-with-priority-class
=== PAUSE kuttl/harness/examples-collector-with-priority-class
=== RUN kuttl/harness/examples-openshift-agent-as-daemonset
=== PAUSE kuttl/harness/examples-openshift-agent-as-daemonset
=== RUN kuttl/harness/examples-openshift-with-htpasswd
=== PAUSE kuttl/harness/examples-openshift-with-htpasswd
=== RUN kuttl/harness/examples-service-types
=== PAUSE kuttl/harness/examples-service-types
=== RUN kuttl/harness/examples-simple-prod
=== PAUSE kuttl/harness/examples-simple-prod
=== RUN kuttl/harness/examples-simple-prod-with-volumes
=== PAUSE kuttl/harness/examples-simple-prod-with-volumes
=== RUN kuttl/harness/examples-simplest
=== PAUSE kuttl/harness/examples-simplest
=== RUN kuttl/harness/examples-with-badger
=== PAUSE kuttl/harness/examples-with-badger
=== RUN kuttl/harness/examples-with-badger-and-volume
=== PAUSE kuttl/harness/examples-with-badger-and-volume
=== RUN kuttl/harness/examples-with-cassandra
=== PAUSE kuttl/harness/examples-with-cassandra
=== RUN kuttl/harness/examples-with-sampling
=== PAUSE kuttl/harness/examples-with-sampling
=== CONT kuttl/harness/artifacts
logger.go:42: 08:37:14 | artifacts | Creating namespace: kuttl-test-trusty-caiman
logger.go:42: 08:37:14 | artifacts | artifacts events from ns kuttl-test-trusty-caiman:
logger.go:42: 08:37:14 | artifacts | Deleting namespace: kuttl-test-trusty-caiman
=== CONT kuttl/harness/examples-service-types
logger.go:42: 08:37:19 | examples-service-types | Creating namespace: kuttl-test-definite-elephant
logger.go:42: 08:37:19 | examples-service-types/0-install | starting test step 0-install
logger.go:42: 08:37:19 | examples-service-types/0-install | Jaeger:kuttl-test-definite-elephant/service-types created
logger.go:42: 08:37:25 | examples-service-types/0-install | test step completed 0-install
logger.go:42: 08:37:25 | examples-service-types/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:37:25 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null]
logger.go:42: 08:37:27 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:37:33 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:37:34 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:37:34 | examples-service-types/1-smoke-test | job.batch/report-span created
logger.go:42: 08:37:34 | examples-service-types/1-smoke-test | job.batch/check-span created
logger.go:42: 08:37:46 | examples-service-types/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:37:46 | examples-service-types/2- | starting test step 2-
logger.go:42: 08:37:46 | examples-service-types/2- | test step completed 2-
logger.go:42: 08:37:46 | examples-service-types | examples-service-types events from ns kuttl-test-definite-elephant:
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:23 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc Binding Scheduled Successfully assigned kuttl-test-definite-elephant/service-types-9b94b5c67-g8bmc to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:23 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:23 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:23 +0000 UTC Normal ReplicaSet.apps service-types-9b94b5c67 SuccessfulCreate Created pod: service-types-9b94b5c67-g8bmc replicaset-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:23 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:23 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-9b94b5c67 to 1 deployment-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:24 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:24 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:24 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:24 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:24 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:26 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:26 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:30 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:30 +0000 UTC Normal Pod service-types-9b94b5c67-g8bmc.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:30 +0000 UTC Normal ReplicaSet.apps service-types-9b94b5c67 SuccessfulDelete Deleted pod: service-types-9b94b5c67-g8bmc replicaset-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:30 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:30 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-9b94b5c67 to 0 from 1 deployment-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:31 +0000 UTC Normal Pod service-types-7ff574f59d-rztvp Binding Scheduled Successfully assigned kuttl-test-definite-elephant/service-types-7ff574f59d-rztvp to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:31 +0000 UTC Normal ReplicaSet.apps service-types-7ff574f59d SuccessfulCreate Created pod: service-types-7ff574f59d-rztvp replicaset-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:31 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-7ff574f59d to 1 deployment-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:32 +0000 UTC Normal Pod service-types-7ff574f59d-rztvp AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:32 +0000 UTC Normal Pod service-types-7ff574f59d-rztvp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:32 +0000 UTC Normal Pod service-types-7ff574f59d-rztvp.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:32 +0000 UTC Normal Pod service-types-7ff574f59d-rztvp.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:32 +0000 UTC Normal Pod service-types-7ff574f59d-rztvp.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:32 +0000 UTC Normal Pod service-types-7ff574f59d-rztvp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:32 +0000 UTC Normal Pod service-types-7ff574f59d-rztvp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:34 +0000 UTC Normal Pod check-span-rctrr Binding Scheduled Successfully assigned kuttl-test-definite-elephant/check-span-rctrr to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:34 +0000 UTC Normal Pod check-span-rctrr AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:34 +0000 UTC Normal Pod check-span-rctrr.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:34 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-rctrr job-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:34 +0000 UTC Normal Pod report-span-h2z94 Binding Scheduled Successfully assigned kuttl-test-definite-elephant/report-span-h2z94 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:34 +0000 UTC Normal Pod report-span-h2z94 AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:34 +0000 UTC Normal Pod report-span-h2z94.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:34 +0000 UTC Normal Pod report-span-h2z94.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:34 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-h2z94 job-controller
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:35 +0000 UTC Normal Pod check-span-rctrr.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:35 +0000 UTC Normal Pod check-span-rctrr.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:35 +0000 UTC Normal Pod report-span-h2z94.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:37:46 | examples-service-types | 2023-11-06 08:37:45 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:37:46 | examples-service-types | Deleting namespace: kuttl-test-definite-elephant
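The smoke test just completed is the pattern every example repeats below: gomplate renders a report-span Job, which writes spans to the collector endpoint, and a check-span Job, which polls the query endpoint until those spans are visible, and the kuttl assert then waits for both Jobs. A manual approximation, assuming the standard Job completion condition (the jobs' internal scripts are not shown in this log):

    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
    # kuttl's assert file effectively waits for both jobs, akin to:
    kubectl wait --for=condition=complete --timeout=300s job/report-span -n "$NAMESPACE"
    kubectl wait --for=condition=complete --timeout=300s job/check-span -n "$NAMESPACE"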
=== CONT kuttl/harness/examples-with-sampling
logger.go:42: 08:38:13 | examples-with-sampling | Creating namespace: kuttl-test-moving-wasp
logger.go:42: 08:38:13 | examples-with-sampling/0-install | starting test step 0-install
logger.go:42: 08:38:13 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE]
logger.go:42: 08:38:13 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 08:38:13 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-moving-wasp
logger.go:42: 08:38:13 | examples-with-sampling/0-install | kubectl create namespace kuttl-test-moving-wasp 2>&1 | grep -v "already exists" || true
logger.go:42: 08:38:13 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-moving-wasp 2>&1 | grep -v "already exists" || true
logger.go:42: 08:38:13 | examples-with-sampling/0-install | service/cassandra created
logger.go:42: 08:38:13 | examples-with-sampling/0-install | statefulset.apps/cassandra created
logger.go:42: 08:38:13 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 08:38:24 | examples-with-sampling/0-install | test step completed 0-install
logger.go:42: 08:38:24 | examples-with-sampling/1-install | starting test step 1-install
logger.go:42: 08:38:25 | examples-with-sampling/1-install | Jaeger:kuttl-test-moving-wasp/with-sampling created
logger.go:42: 08:38:31 | examples-with-sampling/1-install | test step completed 1-install
logger.go:42: 08:38:31 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:38:31 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null]
logger.go:42: 08:38:32 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:38:38 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:38:38 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:38:39 | examples-with-sampling/2-smoke-test | job.batch/report-span created
logger.go:42: 08:38:39 | examples-with-sampling/2-smoke-test | job.batch/check-span created
logger.go:42: 08:38:50 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 08:38:50 | examples-with-sampling/3- | starting test step 3-
logger.go:42: 08:38:50 | examples-with-sampling/3- | test step completed 3-
logger.go:42: 08:38:50 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-moving-wasp:
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:13 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-moving-wasp/cassandra-0 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:13 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:14 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:14 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:18 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 4.092s (4.092s including waiting) kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:18 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:18 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:19 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-moving-wasp/cassandra-1 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:19 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:19 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:19 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:23 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 3.971s (3.971s including waiting) kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:23 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:23 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:28 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt Binding Scheduled Successfully assigned kuttl-test-moving-wasp/with-sampling-5c56765597-wglmt to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:28 +0000 UTC Warning Pod with-sampling-5c56765597-wglmt FailedMount MountVolume.SetUp failed for volume "with-sampling-collector-tls-config-volume" : secret "with-sampling-collector-headless-tls" not found kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:28 +0000 UTC Normal ReplicaSet.apps with-sampling-5c56765597 SuccessfulCreate Created pod: with-sampling-5c56765597-wglmt replicaset-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:28 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-5c56765597 to 1 deployment-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:29 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:29 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:29 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:29 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:29 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:30 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:30 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:35 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:35 +0000 UTC Normal Pod with-sampling-5c56765597-wglmt.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:35 +0000 UTC Normal ReplicaSet.apps with-sampling-5c56765597 SuccessfulDelete Deleted pod: with-sampling-5c56765597-wglmt replicaset-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:35 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set with-sampling-5c56765597 to 0 from 1 deployment-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal Pod with-sampling-7f966bc7f8-bmr99 Binding Scheduled Successfully assigned kuttl-test-moving-wasp/with-sampling-7f966bc7f8-bmr99 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal Pod with-sampling-7f966bc7f8-bmr99 AddedInterface Add eth0 [10.128.2.55/23] from ovn-kubernetes
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal Pod with-sampling-7f966bc7f8-bmr99.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal Pod with-sampling-7f966bc7f8-bmr99.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal Pod with-sampling-7f966bc7f8-bmr99.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal Pod with-sampling-7f966bc7f8-bmr99.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal Pod with-sampling-7f966bc7f8-bmr99.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal Pod with-sampling-7f966bc7f8-bmr99.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal ReplicaSet.apps with-sampling-7f966bc7f8 SuccessfulCreate Created pod: with-sampling-7f966bc7f8-bmr99 replicaset-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:36 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-7f966bc7f8 to 1 deployment-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod check-span-rjn88 Binding Scheduled Successfully assigned kuttl-test-moving-wasp/check-span-rjn88 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod check-span-rjn88 AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod check-span-rjn88.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod check-span-rjn88.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod check-span-rjn88.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-rjn88 job-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod report-span-d4b76 Binding Scheduled Successfully assigned kuttl-test-moving-wasp/report-span-d4b76 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod report-span-d4b76 AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod report-span-d4b76.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod report-span-d4b76.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Pod report-span-d4b76.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:39 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-d4b76 job-controller
logger.go:42: 08:38:50 | examples-with-sampling | 2023-11-06 08:38:50 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:38:50 | examples-with-sampling | Deleting namespace: kuttl-test-moving-wasp
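The make cassandra recipe, echoed in full in the 0-install steps above and again below, is deliberately idempotent: "already exists" errors are filtered out and ignored so that reruns against a half-initialized namespace still pass. Its two effective commands, as echoed in the log:

    kubectl create namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true
    kubectl create -f ./tests/cassandra.yml --namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true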
=== CONT kuttl/harness/examples-with-cassandra
logger.go:42: 08:39:47 | examples-with-cassandra | Creating namespace: kuttl-test-sought-gorilla
logger.go:42: 08:39:47 | examples-with-cassandra/0-install | starting test step 0-install
logger.go:42: 08:39:47 | examples-with-cassandra/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE]
logger.go:42: 08:39:47 | examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 08:39:47 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-sought-gorilla
logger.go:42: 08:39:47 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-sought-gorilla 2>&1 | grep -v "already exists" || true
logger.go:42: 08:39:47 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-sought-gorilla 2>&1 | grep -v "already exists" || true
logger.go:42: 08:39:47 | examples-with-cassandra/0-install | service/cassandra created
logger.go:42: 08:39:47 | examples-with-cassandra/0-install | statefulset.apps/cassandra created
logger.go:42: 08:39:47 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 08:39:49 | examples-with-cassandra/0-install | test step completed 0-install
logger.go:42: 08:39:49 | examples-with-cassandra/1-install | starting test step 1-install
logger.go:42: 08:39:49 | examples-with-cassandra/1-install | Jaeger:kuttl-test-sought-gorilla/with-cassandra created
logger.go:42: 08:40:08 | examples-with-cassandra/1-install | test step completed 1-install
logger.go:42: 08:40:08 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:40:08 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null]
logger.go:42: 08:40:10 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:40:16 | examples-with-cassandra/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:40:16 | examples-with-cassandra/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:40:16 | examples-with-cassandra/2-smoke-test | job.batch/report-span created
logger.go:42: 08:40:16 | examples-with-cassandra/2-smoke-test | job.batch/check-span created
logger.go:42: 08:40:29 | examples-with-cassandra/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 08:40:29 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-sought-gorilla:
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:47 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-sought-gorilla/cassandra-0 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:47 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:48 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:48 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:48 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:48 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:48 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-sought-gorilla/cassandra-1 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:48 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:48 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:48 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:48 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:49 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:52 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-ks7vw Binding Scheduled Successfully assigned kuttl-test-sought-gorilla/with-cassandra-cassandra-schema-job-ks7vw to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:52 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-ks7vw job-controller
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:53 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-ks7vw AddedInterface Add eth0 [10.128.2.56/23] from ovn-kubernetes
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:53 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-ks7vw.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.47.0" kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:58 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-ks7vw.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.47.0" in 4.697s (4.697s including waiting) kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:58 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-ks7vw.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:39:58 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-ks7vw.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:05 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss Binding Scheduled Successfully assigned kuttl-test-sought-gorilla/with-cassandra-549d578b7b-hnxss to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal ReplicaSet.apps with-cassandra-549d578b7b SuccessfulCreate Created pod: with-cassandra-549d578b7b-hnxss replicaset-controller
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:06 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-549d578b7b to 1 deployment-controller
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:11 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:11 +0000 UTC Normal Pod with-cassandra-549d578b7b-hnxss.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:11 +0000 UTC Normal ReplicaSet.apps with-cassandra-549d578b7b SuccessfulDelete Deleted pod: with-cassandra-549d578b7b-hnxss replicaset-controller
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:11 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-549d578b7b to 0 from 1 deployment-controller
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:12 +0000 UTC Normal Pod with-cassandra-68cb4997b6-n7vxc Binding Scheduled Successfully assigned kuttl-test-sought-gorilla/with-cassandra-68cb4997b6-n7vxc to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:12 +0000 UTC Normal Pod with-cassandra-68cb4997b6-n7vxc AddedInterface Add eth0 [10.128.2.58/23] from ovn-kubernetes
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:12 +0000 UTC Normal Pod with-cassandra-68cb4997b6-n7vxc.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:12 +0000 UTC Normal Pod with-cassandra-68cb4997b6-n7vxc.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:12 +0000 UTC Normal Pod with-cassandra-68cb4997b6-n7vxc.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:12 +0000 UTC Normal Pod with-cassandra-68cb4997b6-n7vxc.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:12 +0000 UTC Normal Pod with-cassandra-68cb4997b6-n7vxc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:12 +0000 UTC Normal Pod with-cassandra-68cb4997b6-n7vxc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:12 +0000 UTC Normal ReplicaSet.apps with-cassandra-68cb4997b6 SuccessfulCreate Created pod: with-cassandra-68cb4997b6-n7vxc replicaset-controller
logger.go:42: 08:40:29 | examples-with-cassandra |
2023-11-06 08:40:12 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-68cb4997b6 to 1 deployment-controller logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:16 +0000 UTC Normal Pod report-span-9658z Binding Scheduled Successfully assigned kuttl-test-sought-gorilla/report-span-9658z to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:16 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9658z job-controller logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Pod check-span-htp6x Binding Scheduled Successfully assigned kuttl-test-sought-gorilla/check-span-htp6x to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Pod check-span-htp6x AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Pod check-span-htp6x.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Pod check-span-htp6x.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Pod check-span-htp6x.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-htp6x job-controller logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Pod report-span-9658z AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Pod report-span-9658z.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Pod report-span-9658z.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:17 +0000 UTC Normal Pod report-span-9658z.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:40:29 | examples-with-cassandra | 2023-11-06 08:40:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:40:29 | examples-with-cassandra | Deleting namespace: kuttl-test-sought-gorilla === CONT kuttl/harness/examples-with-badger-and-volume logger.go:42: 08:40:40 | examples-with-badger-and-volume | Creating namespace: kuttl-test-live-mite logger.go:42: 08:40:40 | examples-with-badger-and-volume/0-install | starting test step 0-install logger.go:42: 08:40:41 | examples-with-badger-and-volume/0-install | Jaeger:kuttl-test-live-mite/with-badger-and-volume created logger.go:42: 08:40:47 | examples-with-badger-and-volume/0-install | test step completed 0-install logger.go:42: 08:40:47 | examples-with-badger-and-volume/1-smoke-test | 
starting test step 1-smoke-test logger.go:42: 08:40:47 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger-and-volume /dev/null] logger.go:42: 08:40:49 | examples-with-badger-and-volume/1-smoke-test | Warning: resource jaegers/with-badger-and-volume is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:40:55 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:40:56 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:40:57 | examples-with-badger-and-volume/1-smoke-test | job.batch/report-span created logger.go:42: 08:40:57 | examples-with-badger-and-volume/1-smoke-test | job.batch/check-span created logger.go:42: 08:41:09 | examples-with-badger-and-volume/1-smoke-test | test step completed 1-smoke-test logger.go:42: 08:41:09 | examples-with-badger-and-volume | examples-with-badger-and-volume events from ns kuttl-test-live-mite: logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:44 +0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4 Binding Scheduled Successfully assigned kuttl-test-live-mite/with-badger-and-volume-668796d567-p46b4 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:44 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-668796d567 SuccessfulCreate Created pod: with-badger-and-volume-668796d567-p46b4 replicaset-controller logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:44 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-668796d567 to 1 deployment-controller logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:45 +0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4 AddedInterface Add eth0 [10.128.2.59/23] from ovn-kubernetes logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:45 +0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:45 +0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:45 +0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:45 
+0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:45 +0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:45 +0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:51 +0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:51 +0000 UTC Normal Pod with-badger-and-volume-668796d567-p46b4.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:51 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-668796d567 SuccessfulDelete Deleted pod: with-badger-and-volume-668796d567-p46b4 replicaset-controller logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:51 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled down replica set with-badger-and-volume-668796d567 to 0 from 1 deployment-controller logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:52 +0000 UTC Normal Pod with-badger-and-volume-5f9f4c9f74-zswq8 Binding Scheduled Successfully assigned kuttl-test-live-mite/with-badger-and-volume-5f9f4c9f74-zswq8 to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:52 +0000 UTC Normal Pod with-badger-and-volume-5f9f4c9f74-zswq8 AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:52 +0000 UTC Normal Pod with-badger-and-volume-5f9f4c9f74-zswq8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:52 +0000 UTC Normal Pod with-badger-and-volume-5f9f4c9f74-zswq8.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:52 +0000 UTC Normal Pod with-badger-and-volume-5f9f4c9f74-zswq8.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:52 +0000 UTC Normal Pod with-badger-and-volume-5f9f4c9f74-zswq8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:52 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-5f9f4c9f74 SuccessfulCreate Created pod: with-badger-and-volume-5f9f4c9f74-zswq8 replicaset-controller logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:52 +0000 UTC Normal Deployment.apps 
with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-5f9f4c9f74 to 1 deployment-controller logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:53 +0000 UTC Normal Pod with-badger-and-volume-5f9f4c9f74-zswq8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:53 +0000 UTC Normal Pod with-badger-and-volume-5f9f4c9f74-zswq8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod check-span-rf5wk Binding Scheduled Successfully assigned kuttl-test-live-mite/check-span-rf5wk to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod check-span-rf5wk AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod check-span-rf5wk.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod check-span-rf5wk.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod check-span-rf5wk.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-rf5wk job-controller logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod report-span-h4rsf Binding Scheduled Successfully assigned kuttl-test-live-mite/report-span-h4rsf to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod report-span-h4rsf AddedInterface Add eth0 [10.128.2.60/23] from ovn-kubernetes logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod report-span-h4rsf.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod report-span-h4rsf.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Pod report-span-h4rsf.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:40:57 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-h4rsf job-controller logger.go:42: 08:41:09 | examples-with-badger-and-volume | 2023-11-06 08:41:08 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:41:09 | examples-with-badger-and-volume | Deleting namespace: kuttl-test-live-mite === CONT kuttl/harness/examples-with-badger logger.go:42: 
08:41:20 | examples-with-badger | Creating namespace: kuttl-test-trusting-mallard logger.go:42: 08:41:20 | examples-with-badger/0-install | starting test step 0-install logger.go:42: 08:41:20 | examples-with-badger/0-install | Jaeger:kuttl-test-trusting-mallard/with-badger created logger.go:42: 08:41:26 | examples-with-badger/0-install | test step completed 0-install logger.go:42: 08:41:26 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test logger.go:42: 08:41:26 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null] logger.go:42: 08:41:28 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:41:34 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:41:35 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:41:35 | examples-with-badger/1-smoke-test | job.batch/report-span created logger.go:42: 08:41:35 | examples-with-badger/1-smoke-test | job.batch/check-span created logger.go:42: 08:41:46 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test logger.go:42: 08:41:46 | examples-with-badger | examples-with-badger events from ns kuttl-test-trusting-mallard: logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:24 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w Binding Scheduled Successfully assigned kuttl-test-trusting-mallard/with-badger-677fd77d4d-srf9w to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:24 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w AddedInterface Add eth0 [10.128.2.61/23] from ovn-kubernetes logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:24 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:24 +0000 UTC Normal ReplicaSet.apps with-badger-677fd77d4d SuccessfulCreate Created pod: with-badger-677fd77d4d-srf9w replicaset-controller logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:24 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-677fd77d4d to 1 deployment-controller logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:25 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:25 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w.spec.containers{jaeger} 
Started Started container jaeger kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:25 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:25 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:25 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:31 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:31 +0000 UTC Normal Pod with-badger-677fd77d4d-srf9w.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:31 +0000 UTC Normal ReplicaSet.apps with-badger-677fd77d4d SuccessfulDelete Deleted pod: with-badger-677fd77d4d-srf9w replicaset-controller logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:31 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-677fd77d4d to 0 from 1 deployment-controller logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:32 +0000 UTC Normal Pod with-badger-5996cc4bf7-8zbnz Binding Scheduled Successfully assigned kuttl-test-trusting-mallard/with-badger-5996cc4bf7-8zbnz to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:32 +0000 UTC Normal Pod with-badger-5996cc4bf7-8zbnz AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:32 +0000 UTC Normal Pod with-badger-5996cc4bf7-8zbnz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:32 +0000 UTC Normal ReplicaSet.apps with-badger-5996cc4bf7 SuccessfulCreate Created pod: with-badger-5996cc4bf7-8zbnz replicaset-controller logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:32 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-5996cc4bf7 to 1 deployment-controller logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:33 +0000 UTC Normal Pod with-badger-5996cc4bf7-8zbnz.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:33 +0000 UTC Normal Pod with-badger-5996cc4bf7-8zbnz.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:33 +0000 UTC Normal Pod with-badger-5996cc4bf7-8zbnz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:33 +0000 UTC Normal Pod 
with-badger-5996cc4bf7-8zbnz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:33 +0000 UTC Normal Pod with-badger-5996cc4bf7-8zbnz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:35 +0000 UTC Normal Pod check-span-vc7v5 Binding Scheduled Successfully assigned kuttl-test-trusting-mallard/check-span-vc7v5 to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:35 +0000 UTC Normal Pod check-span-vc7v5 AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:35 +0000 UTC Normal Pod check-span-vc7v5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:35 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-vc7v5 job-controller logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:35 +0000 UTC Normal Pod report-span-dqc9j Binding Scheduled Successfully assigned kuttl-test-trusting-mallard/report-span-dqc9j to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:35 +0000 UTC Normal Pod report-span-dqc9j AddedInterface Add eth0 [10.128.2.62/23] from ovn-kubernetes logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:35 +0000 UTC Normal Pod report-span-dqc9j.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:35 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-dqc9j job-controller logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:36 +0000 UTC Normal Pod check-span-vc7v5.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:36 +0000 UTC Normal Pod check-span-vc7v5.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:36 +0000 UTC Normal Pod report-span-dqc9j.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:36 +0000 UTC Normal Pod report-span-dqc9j.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:41:46 | examples-with-badger | 2023-11-06 08:41:46 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:41:46 | examples-with-badger | Deleting namespace: kuttl-test-trusting-mallard === CONT kuttl/harness/examples-simplest logger.go:42: 08:41:58 | examples-simplest | Creating namespace: kuttl-test-social-silkworm logger.go:42: 08:41:58 | examples-simplest/0-install | starting test step 0-install logger.go:42: 08:41:58 | examples-simplest/0-install | Jaeger:kuttl-test-social-silkworm/simplest created logger.go:42: 08:42:05 | examples-simplest/0-install | test step completed 0-install logger.go:42: 08:42:05 | 
examples-simplest/1-smoke-test | starting test step 1-smoke-test logger.go:42: 08:42:05 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null] logger.go:42: 08:42:06 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:42:12 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:42:13 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:42:13 | examples-simplest/1-smoke-test | job.batch/report-span created logger.go:42: 08:42:13 | examples-simplest/1-smoke-test | job.batch/check-span created logger.go:42: 08:42:24 | examples-simplest/1-smoke-test | test step completed 1-smoke-test logger.go:42: 08:42:24 | examples-simplest | examples-simplest events from ns kuttl-test-social-silkworm: logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:01 +0000 UTC Normal Pod simplest-6cdb689b87-cglvn Binding Scheduled Successfully assigned kuttl-test-social-silkworm/simplest-6cdb689b87-cglvn to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:01 +0000 UTC Normal ReplicaSet.apps simplest-6cdb689b87 SuccessfulCreate Created pod: simplest-6cdb689b87-cglvn replicaset-controller logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:01 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-6cdb689b87 to 1 deployment-controller logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:02 +0000 UTC Warning Pod simplest-6cdb689b87-cglvn FailedMount MountVolume.SetUp failed for volume "simplest-collector-tls-config-volume" : secret "simplest-collector-headless-tls" not found kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:02 +0000 UTC Warning Pod simplest-6cdb689b87-cglvn FailedMount MountVolume.SetUp failed for volume "simplest-ui-oauth-proxy-tls" : secret "simplest-ui-oauth-proxy-tls" not found kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:02 +0000 UTC Normal Pod simplest-6cdb689b87-cglvn AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:02 +0000 UTC Normal Pod simplest-6cdb689b87-cglvn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:03 +0000 UTC Normal Pod simplest-6cdb689b87-cglvn.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:03 +0000 UTC Normal Pod 
simplest-6cdb689b87-cglvn.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:03 +0000 UTC Normal Pod simplest-6cdb689b87-cglvn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:03 +0000 UTC Normal Pod simplest-6cdb689b87-cglvn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:03 +0000 UTC Normal Pod simplest-6cdb689b87-cglvn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:07 +0000 UTC Normal Pod simplest-6cdb689b87-cglvn.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:07 +0000 UTC Normal Pod simplest-6cdb689b87-cglvn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:07 +0000 UTC Normal ReplicaSet.apps simplest-6cdb689b87 SuccessfulDelete Deleted pod: simplest-6cdb689b87-cglvn replicaset-controller logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:07 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-6cdb689b87 to 0 from 1 deployment-controller logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:08 +0000 UTC Normal Pod simplest-544b99698-cghhw Binding Scheduled Successfully assigned kuttl-test-social-silkworm/simplest-544b99698-cghhw to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:08 +0000 UTC Normal ReplicaSet.apps simplest-544b99698 SuccessfulCreate Created pod: simplest-544b99698-cghhw replicaset-controller logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:08 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-544b99698 to 1 deployment-controller logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:09 +0000 UTC Normal Pod simplest-544b99698-cghhw AddedInterface Add eth0 [10.129.2.45/23] from ovn-kubernetes logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:09 +0000 UTC Normal Pod simplest-544b99698-cghhw.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:09 +0000 UTC Normal Pod simplest-544b99698-cghhw.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:09 +0000 UTC Normal Pod simplest-544b99698-cghhw.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:09 +0000 UTC Normal Pod simplest-544b99698-cghhw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:09 +0000 UTC Normal Pod simplest-544b99698-cghhw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 
08:42:24 | examples-simplest | 2023-11-06 08:42:09 +0000 UTC Normal Pod simplest-544b99698-cghhw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:13 +0000 UTC Normal Pod check-span-d629c Binding Scheduled Successfully assigned kuttl-test-social-silkworm/check-span-d629c to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:13 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-d629c job-controller logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:13 +0000 UTC Normal Pod report-span-kqb4d Binding Scheduled Successfully assigned kuttl-test-social-silkworm/report-span-kqb4d to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:13 +0000 UTC Normal Pod report-span-kqb4d AddedInterface Add eth0 [10.128.2.63/23] from ovn-kubernetes logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:13 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-kqb4d job-controller logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:14 +0000 UTC Normal Pod check-span-d629c AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:14 +0000 UTC Normal Pod check-span-d629c.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:14 +0000 UTC Normal Pod check-span-d629c.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:14 +0000 UTC Normal Pod check-span-d629c.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:14 +0000 UTC Normal Pod report-span-kqb4d.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:14 +0000 UTC Normal Pod report-span-kqb4d.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:14 +0000 UTC Normal Pod report-span-kqb4d.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:42:24 | examples-simplest | 2023-11-06 08:42:24 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:42:24 | examples-simplest | Deleting namespace: kuttl-test-social-silkworm === CONT kuttl/harness/examples-simple-prod-with-volumes logger.go:42: 08:42:36 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:42:36 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-known-beagle logger.go:42: 08:42:36 | examples-simple-prod-with-volumes/1-install | starting test step 1-install logger.go:42: 08:42:36 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-known-beagle/simple-prod created logger.go:42: 08:43:12 | examples-simple-prod-with-volumes/1-install | 
test step completed 1-install logger.go:42: 08:43:12 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:43:12 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 08:43:14 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:43:20 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:43:20 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:43:21 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created logger.go:42: 08:43:21 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created logger.go:42: 08:43:33 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test logger.go:42: 08:43:33 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume logger.go:42: 08:43:33 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data] logger.go:42: 08:43:33 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-known-beagle: logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:42 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b5449b SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx replicaset-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx Binding Scheduled Successfully assigned kuttl-test-known-beagle/elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:42 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx.spec.containers{elasticsearch} Pulled Container image 
"registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:42 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b5449b to 1 deployment-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:43 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:42:58 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestknownbeaglesimpleprod-1-7946b54nszmx.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:09 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-nb8w4 Binding Scheduled Successfully assigned kuttl-test-known-beagle/simple-prod-collector-55ff468b9d-nb8w4 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:09 +0000 UTC Warning Pod simple-prod-collector-55ff468b9d-nb8w4 FailedMount MountVolume.SetUp failed for volume "simple-prod-collector-tls-config-volume" : secret "simple-prod-collector-headless-tls" not found kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:09 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-55ff468b9d SuccessfulCreate Created pod: simple-prod-collector-55ff468b9d-nb8w4 replicaset-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:09 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-55ff468b9d to 1 deployment-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:09 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg Binding Scheduled Successfully assigned kuttl-test-known-beagle/simple-prod-query-647dc86bdd-zvfwg to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:09 +0000 UTC Warning Pod simple-prod-query-647dc86bdd-zvfwg FailedMount MountVolume.SetUp 
failed for volume "simple-prod-ui-oauth-proxy-tls" : secret "simple-prod-ui-oauth-proxy-tls" not found kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:09 +0000 UTC Normal ReplicaSet.apps simple-prod-query-647dc86bdd SuccessfulCreate Created pod: simple-prod-query-647dc86bdd-zvfwg replicaset-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:09 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-647dc86bdd to 1 deployment-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-nb8w4 AddedInterface Add eth0 [10.128.2.64/23] from ovn-kubernetes logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-nb8w4.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-nb8w4.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-nb8w4.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg AddedInterface Add eth0 [10.131.0.49/23] from ovn-kubernetes logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{jaeger-agent} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:10 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:15 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:15 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:15 +0000 UTC Normal Pod simple-prod-query-647dc86bdd-zvfwg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:15 +0000 UTC Normal ReplicaSet.apps simple-prod-query-647dc86bdd SuccessfulDelete Deleted pod: simple-prod-query-647dc86bdd-zvfwg replicaset-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:15 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-647dc86bdd to 0 from 1 deployment-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:16 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg Binding Scheduled Successfully assigned kuttl-test-known-beagle/simple-prod-query-85cbdfb69d-9qlpg to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:16 +0000 UTC Normal ReplicaSet.apps simple-prod-query-85cbdfb69d SuccessfulCreate Created pod: simple-prod-query-85cbdfb69d-9qlpg replicaset-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:16 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-85cbdfb69d to 1 deployment-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod 
simple-prod-query-85cbdfb69d-9qlpg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:17 +0000 UTC Normal Pod simple-prod-query-85cbdfb69d-9qlpg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod check-span-r5spc Binding Scheduled Successfully assigned kuttl-test-known-beagle/check-span-r5spc to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod check-span-r5spc AddedInterface Add eth0 [10.128.2.66/23] from ovn-kubernetes logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod check-span-r5spc.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod check-span-r5spc.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod check-span-r5spc.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-r5spc job-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod report-span-5gbhj Binding Scheduled Successfully assigned kuttl-test-known-beagle/report-span-5gbhj to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod report-span-5gbhj AddedInterface Add eth0 [10.128.2.65/23] from ovn-kubernetes logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod report-span-5gbhj.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already 
present on machine kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod report-span-5gbhj.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Pod report-span-5gbhj.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:21 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-5gbhj job-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | 2023-11-06 08:43:32 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:43:33 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-known-beagle === CONT kuttl/harness/examples-simple-prod logger.go:42: 08:43:40 | examples-simple-prod | Creating namespace: kuttl-test-absolute-crow logger.go:42: 08:43:40 | examples-simple-prod/1-install | starting test step 1-install logger.go:42: 08:43:40 | examples-simple-prod/1-install | Jaeger:kuttl-test-absolute-crow/simple-prod created logger.go:42: 08:44:17 | examples-simple-prod/1-install | test step completed 1-install logger.go:42: 08:44:17 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:44:17 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 08:44:19 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
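
Note: the kubectl warning above is benign. The operator creates the Jaeger object imperatively, so it lacks the last-applied annotation, and the first kubectl apply from get-token.sh patches it in. A minimal sketch of the same situation, using a hypothetical resource for illustration:

    # Objects created without --save-config lack the annotation, and the
    # first kubectl apply warns once while backfilling it.
    kubectl create deployment demo --image=nginx            # no --save-config
    kubectl get deployment demo -o yaml > demo.yaml
    kubectl apply set-last-applied -f demo.yaml --create-annotation=true
    kubectl apply -f demo.yaml                              # no warning now
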
logger.go:42: 08:44:26 | examples-simple-prod/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:44:27 | examples-simple-prod/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:44:27 | examples-simple-prod/2-smoke-test | job.batch/report-span created
logger.go:42: 08:44:27 | examples-simple-prod/2-smoke-test | job.batch/check-span created
logger.go:42: 08:44:39 | examples-simple-prod/2-smoke-test | test step completed 2-smoke-test
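
Note: the smoke-test step follows a render-then-apply pattern: gomplate expands environment variables into a Job manifest, which kubectl then applies. A sketch of the same pattern; the real template at /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template is not reproduced in the log, so the stand-in template below is hypothetical:

    # gomplate reads env vars via env.Getenv and writes the rendered file.
    cat > smoke-test.yaml.template <<'EOF'
    collector: {{ env.Getenv "JAEGER_COLLECTOR_ENDPOINT" }}
    query: {{ env.Getenv "JAEGER_QUERY_ENDPOINT" }}
    EOF
    JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 \
    gomplate -f smoke-test.yaml.template -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
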
"registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:43:48 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsolutecrowsimpleprod-1-7c5f54kfvp7.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:43:48 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestabsolutecrowsimpleprod-1-7c5f54kfvp7.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:43:58 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestabsolutecrowsimpleprod-1-7c5f54kfvp7.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:03 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestabsolutecrowsimpleprod-1-7c5f54kfvp7.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6h9zp Binding Scheduled Successfully assigned kuttl-test-absolute-crow/simple-prod-collector-77fcbdc546-6h9zp to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6h9zp AddedInterface Add eth0 [10.128.2.67/23] from ovn-kubernetes logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6h9zp.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6h9zp.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-6h9zp.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulCreate Created pod: simple-prod-collector-77fcbdc546-6h9zp replicaset-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-77fcbdc546 to 1 deployment-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz Binding Scheduled Successfully assigned kuttl-test-absolute-crow/simple-prod-query-679b8d8645-bvtdz to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz AddedInterface Add eth0 [10.131.0.51/23] from ovn-kubernetes logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{jaeger-query} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-679b8d8645 SuccessfulCreate Created pod: simple-prod-query-679b8d8645-bvtdz replicaset-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:14 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-679b8d8645 to 1 deployment-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:15 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:15 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:22 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:22 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:22 +0000 UTC Normal Pod simple-prod-query-679b8d8645-bvtdz.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:22 +0000 UTC Normal ReplicaSet.apps simple-prod-query-679b8d8645 SuccessfulDelete Deleted pod: simple-prod-query-679b8d8645-bvtdz replicaset-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:22 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-679b8d8645 to 0 from 1 deployment-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 
08:44:23 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm Binding Scheduled Successfully assigned kuttl-test-absolute-crow/simple-prod-query-796fb89cf6-2ltjm to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:23 +0000 UTC Normal ReplicaSet.apps simple-prod-query-796fb89cf6 SuccessfulCreate Created pod: simple-prod-query-796fb89cf6-2ltjm replicaset-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:23 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-796fb89cf6 to 1 deployment-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm AddedInterface Add eth0 [10.131.0.52/23] from ovn-kubernetes logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:24 +0000 UTC Normal Pod simple-prod-query-796fb89cf6-2ltjm.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:27 +0000 UTC Normal Pod check-span-99lr5 Binding Scheduled Successfully assigned kuttl-test-absolute-crow/check-span-99lr5 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:27 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-99lr5 job-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:27 +0000 UTC Normal Pod report-span-fn7lt Binding 
Scheduled Successfully assigned kuttl-test-absolute-crow/report-span-fn7lt to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:27 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-fn7lt job-controller logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:28 +0000 UTC Normal Pod check-span-99lr5 AddedInterface Add eth0 [10.128.2.69/23] from ovn-kubernetes logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:28 +0000 UTC Normal Pod check-span-99lr5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:28 +0000 UTC Normal Pod check-span-99lr5.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:28 +0000 UTC Normal Pod check-span-99lr5.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:28 +0000 UTC Normal Pod report-span-fn7lt AddedInterface Add eth0 [10.128.2.68/23] from ovn-kubernetes logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:28 +0000 UTC Normal Pod report-span-fn7lt.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:28 +0000 UTC Normal Pod report-span-fn7lt.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:28 +0000 UTC Normal Pod report-span-fn7lt.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:29 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:29 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:29 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:44:39 | examples-simple-prod | 2023-11-06 08:44:38 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:44:39 | examples-simple-prod | Deleting namespace: kuttl-test-absolute-crow === CONT kuttl/harness/examples-business-application-injected-sidecar logger.go:42: 08:44:51 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-pumped-fowl logger.go:42: 08:44:51 | 
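
Note: the FailedGetResourceMetric/FailedComputeMetricsReplicas warnings recur in each namespace because the collector's HorizontalPodAutoscaler queries the resource metrics API before it has samples for the freshly started pods; they clear on their own once metrics arrive. A hedged way to watch this recover, using the names from the events above:

    # The HPA reports <unknown> targets until the metrics API has samples
    # for the new collector pod.
    kubectl get hpa simple-prod-collector -n "$NAMESPACE"
    kubectl describe hpa simple-prod-collector -n "$NAMESPACE"
    kubectl top pod -n "$NAMESPACE"   # errors until metrics are available
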
=== CONT kuttl/harness/examples-business-application-injected-sidecar
logger.go:42: 08:44:51 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-pumped-fowl
logger.go:42: 08:44:51 | examples-business-application-injected-sidecar/0-install | starting test step 0-install
logger.go:42: 08:44:52 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-pumped-fowl/myapp created
logger.go:42: 08:44:52 | examples-business-application-injected-sidecar/0-install | test step completed 0-install
logger.go:42: 08:44:52 | examples-business-application-injected-sidecar/1-install | starting test step 1-install
logger.go:42: 08:44:52 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-pumped-fowl/simplest created
logger.go:42: 08:45:04 | examples-business-application-injected-sidecar/1-install | test step completed 1-install
logger.go:42: 08:45:04 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 08:45:04 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 08:45:06 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:45:12 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:45:12 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:45:13 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created
logger.go:42: 08:45:13 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test
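
Note: this test deploys the plain business application first and lets the operator inject the jaeger-agent sidecar afterwards, which is why a second myapp ReplicaSet appears in the events below. Injection is driven by the standard Jaeger Operator annotation; a minimal sketch (the image and names match the test, but the exact manifest is not shown in the log):

    kubectl apply -n "$NAMESPACE" -f - <<'EOF'
    apiVersion: apps/v1
    kind: Deployment
    metadata:
      name: myapp
      annotations:
        # tells the Jaeger Operator to inject a jaeger-agent sidecar
        "sidecar.jaegertracing.io/inject": "true"
    spec:
      replicas: 1
      selector:
        matchLabels: {app: myapp}
      template:
        metadata:
          labels: {app: myapp}
        spec:
          containers:
          - name: myapp
            image: jaegertracing/vertx-create-span:operator-e2e-tests
    EOF
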
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-pumped-fowl:
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:52 +0000 UTC Normal Pod myapp-679f79d5f8-zvg2w Binding Scheduled Successfully assigned kuttl-test-pumped-fowl/myapp-679f79d5f8-zvg2w to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:52 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulCreate Created pod: myapp-679f79d5f8-zvg2w replicaset-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:52 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-679f79d5f8 to 1 deployment-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:53 +0000 UTC Normal Pod myapp-679f79d5f8-zvg2w AddedInterface Add eth0 [10.129.2.48/23] from ovn-kubernetes
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:53 +0000 UTC Normal Pod myapp-679f79d5f8-zvg2w.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:55 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-75978cd665 to 1 deployment-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:56 +0000 UTC Normal Pod myapp-75978cd665-ngz76 Binding Scheduled Successfully assigned kuttl-test-pumped-fowl/myapp-75978cd665-ngz76 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:56 +0000 UTC Warning Pod myapp-75978cd665-ngz76 FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:56 +0000 UTC Warning Pod myapp-75978cd665-ngz76 FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:56 +0000 UTC Normal ReplicaSet.apps myapp-75978cd665 SuccessfulCreate Created pod: myapp-75978cd665-ngz76 replicaset-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:57 +0000 UTC Normal Pod myapp-679f79d5f8-zvg2w.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 4.014s (4.014s including waiting) kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:57 +0000 UTC Normal Pod myapp-679f79d5f8-zvg2w.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:44:57 +0000 UTC Normal Pod myapp-679f79d5f8-zvg2w.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:00 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-d8f898f67 to 1 deployment-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:01 +0000 UTC Normal Pod simplest-d8f898f67-crbmh Binding Scheduled Successfully assigned kuttl-test-pumped-fowl/simplest-d8f898f67-crbmh to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:01 +0000 UTC Warning Pod simplest-d8f898f67-crbmh FailedMount MountVolume.SetUp failed for volume "simplest-ui-oauth-proxy-tls" : secret "simplest-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:01 +0000 UTC Warning Pod simplest-d8f898f67-crbmh FailedMount MountVolume.SetUp failed for volume "simplest-collector-tls-config-volume" : secret "simplest-collector-headless-tls" not found kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:01 +0000 UTC Normal ReplicaSet.apps simplest-d8f898f67 SuccessfulCreate Created pod: simplest-d8f898f67-crbmh replicaset-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:02 +0000 UTC Normal Pod simplest-d8f898f67-crbmh AddedInterface Add eth0 [10.128.2.71/23] from ovn-kubernetes
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:02 +0000 UTC Normal Pod simplest-d8f898f67-crbmh.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:02 +0000 UTC Normal Pod simplest-d8f898f67-crbmh.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:02 +0000 UTC Normal Pod simplest-d8f898f67-crbmh.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:02 +0000 UTC Normal Pod simplest-d8f898f67-crbmh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:02 +0000 UTC Normal Pod simplest-d8f898f67-crbmh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:02 +0000 UTC Normal Pod simplest-d8f898f67-crbmh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:04 +0000 UTC Warning Pod myapp-679f79d5f8-zvg2w.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.129.2.48:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:04 +0000 UTC Normal Pod myapp-75978cd665-ngz76 AddedInterface Add eth0 [10.131.0.53/23] from ovn-kubernetes
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:04 +0000 UTC Normal Pod myapp-75978cd665-ngz76.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal Pod myapp-75978cd665-ngz76.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.955s (3.955s including waiting) kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal Pod myapp-75978cd665-ngz76.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal Pod myapp-75978cd665-ngz76.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal Pod myapp-75978cd665-ngz76.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal Pod myapp-75978cd665-ngz76.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal Pod myapp-75978cd665-ngz76.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal Pod simplest-d8f898f67-crbmh.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal Pod simplest-d8f898f67-crbmh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal ReplicaSet.apps simplest-d8f898f67 SuccessfulDelete Deleted pod: simplest-d8f898f67-crbmh replicaset-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:08 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-d8f898f67 to 0 from 1 deployment-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal Pod simplest-75564875b-bbr9t Binding Scheduled Successfully assigned kuttl-test-pumped-fowl/simplest-75564875b-bbr9t to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal Pod simplest-75564875b-bbr9t AddedInterface Add eth0 [10.128.2.72/23] from ovn-kubernetes
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal Pod simplest-75564875b-bbr9t.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal Pod simplest-75564875b-bbr9t.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal Pod simplest-75564875b-bbr9t.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal Pod simplest-75564875b-bbr9t.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal Pod simplest-75564875b-bbr9t.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal Pod simplest-75564875b-bbr9t.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal ReplicaSet.apps simplest-75564875b SuccessfulCreate Created pod: simplest-75564875b-bbr9t replicaset-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:09 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-75564875b to 1 deployment-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:10 +0000 UTC Normal Pod myapp-679f79d5f8-zvg2w.spec.containers{myapp} Killing Stopping container myapp kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:10 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulDelete Deleted pod: myapp-679f79d5f8-zvg2w replicaset-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:10 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-679f79d5f8 to 0 from 1 deployment-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod check-span-b4r6r Binding Scheduled Successfully assigned kuttl-test-pumped-fowl/check-span-b4r6r to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod check-span-b4r6r AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod check-span-b4r6r.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod check-span-b4r6r.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod check-span-b4r6r.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-b4r6r job-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod report-span-q7f9f Binding Scheduled Successfully assigned kuttl-test-pumped-fowl/report-span-q7f9f to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod report-span-q7f9f AddedInterface Add eth0 [10.129.2.49/23] from ovn-kubernetes
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod report-span-q7f9f.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod report-span-q7f9f.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Pod report-span-q7f9f.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:13 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-q7f9f job-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:15 +0000 UTC Warning Pod myapp-75978cd665-ngz76.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.131.0.53:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | 2023-11-06 08:45:24 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:45:24 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-pumped-fowl
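
Note: the two Unhealthy events above are the vertx app's HTTP liveness probe hitting its client timeout while the JVM warms up; the test tolerates the transient failures. If such probes flapped into restarts, the usual remedy is a more forgiving probe spec. A hypothetical tuning sketch, not the test's actual manifest (a strategic merge patch keyed on the container name leaves the rest of the pod spec intact):

    kubectl patch deployment myapp -n "$NAMESPACE" -p '
    spec:
      template:
        spec:
          containers:
          - name: myapp
            livenessProbe:
              httpGet:
                path: /
                port: 8080
              timeoutSeconds: 5       # default 1s probe timeout hit the deadline above
              failureThreshold: 3
    '
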
=== CONT kuttl/harness/examples-openshift-with-htpasswd
logger.go:42: 08:45:31 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:45:31 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:45:31 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-correct-bengal
logger.go:42: 08:45:31 | examples-openshift-with-htpasswd/0-install | starting test step 0-install
logger.go:42: 08:45:31 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-correct-bengal/htpasswd created
logger.go:42: 08:45:31 | examples-openshift-with-htpasswd/0-install | test step completed 0-install
logger.go:42: 08:45:31 | examples-openshift-with-htpasswd/1-install | starting test step 1-install
logger.go:42: 08:45:31 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-correct-bengal/with-htpasswd created
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/1-install | test step completed 1-install
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:45:36 | examples-openshift-with-htpasswd/2-check-unsecured | HTTP response is 503. 403 expected. Waiting 10 s
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 3/30 the https://with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 08:45:46 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd]
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-correct-bengal.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-correct-bengal:
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:34 +0000 UTC Normal Pod with-htpasswd-667485b898-gzn7q Binding Scheduled Successfully assigned kuttl-test-correct-bengal/with-htpasswd-667485b898-gzn7q to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:34 +0000 UTC Normal ReplicaSet.apps with-htpasswd-667485b898 SuccessfulCreate Created pod: with-htpasswd-667485b898-gzn7q replicaset-controller
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:34 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-667485b898 to 1 deployment-controller
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:35 +0000 UTC Warning Pod with-htpasswd-667485b898-gzn7q FailedMount MountVolume.SetUp failed for volume "with-htpasswd-collector-tls-config-volume" : secret "with-htpasswd-collector-headless-tls" not found kubelet
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:35 +0000 UTC Normal Pod with-htpasswd-667485b898-gzn7q AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:35 +0000 UTC Normal Pod with-htpasswd-667485b898-gzn7q.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:35 +0000 UTC Normal Pod with-htpasswd-667485b898-gzn7q.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:36 +0000 UTC Normal Pod with-htpasswd-667485b898-gzn7q.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:36 +0000 UTC Normal Pod with-htpasswd-667485b898-gzn7q.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:36 +0000 UTC Normal Pod with-htpasswd-667485b898-gzn7q.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | 2023-11-06 08:45:36 +0000 UTC Normal Pod with-htpasswd-667485b898-gzn7q.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:45:47 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-correct-bengal
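
Note: each check step above shells out to assert-jaeger-http-code.sh, which polls the route until the expected status code appears; the transient 503 at try 2/30 is just the OpenShift router picking up the new route. A rough sketch of the same polling pattern (the pattern, not the script itself; URL and credential variables are placeholders):

    URL="https://with-htpasswd-${NAMESPACE}.apps.example.com/search"
    EXPECTED=403   # 403 for unauthenticated/wrong creds, 200 for valid creds
    for try in $(seq 1 30); do
      code=$(curl -k -s -o /dev/null -w '%{http_code}' \
             -u "$JAEGER_USERNAME:$JAEGER_PASSWORD" "$URL")
      [ "$code" = "$EXPECTED" ] && echo "asserted after $try tries" && exit 0
      echo "HTTP response is $code. $EXPECTED expected. Waiting 10 s"
      sleep 10
    done
    exit 1
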
=== CONT kuttl/harness/examples-openshift-agent-as-daemonset
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset | Creating namespace: kuttl-test-stunning-whippet
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset/0-install | starting test step 0-install
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-stunning-whippet/jaeger-agent-daemonset created
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset/0-install | test step completed 0-install
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset/1-add-policy | starting test step 1-add-policy
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset]
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset"
logger.go:42: 08:45:53 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c sleep 5]
logger.go:42: 08:45:58 | examples-openshift-agent-as-daemonset/1-add-policy | test step completed 1-add-policy
logger.go:42: 08:45:58 | examples-openshift-agent-as-daemonset/2-install | starting test step 2-install
logger.go:42: 08:45:58 | examples-openshift-agent-as-daemonset/2-install | Jaeger:kuttl-test-stunning-whippet/agent-as-daemonset created
logger.go:42: 08:46:04 | examples-openshift-agent-as-daemonset/2-install | test step completed 2-install
logger.go:42: 08:46:04 | examples-openshift-agent-as-daemonset/3-install | starting test step 3-install
logger.go:42: 08:46:04 | examples-openshift-agent-as-daemonset/3-install | Deployment:kuttl-test-stunning-whippet/vertx-create-span-sidecar created
logger.go:42: 08:46:09 | examples-openshift-agent-as-daemonset/3-install | test step completed 3-install
logger.go:42: 08:46:09 | examples-openshift-agent-as-daemonset/4-find-service | starting test step 4-find-service
logger.go:42: 08:46:09 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null]
logger.go:42: 08:46:11 | examples-openshift-agent-as-daemonset/4-find-service | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
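
Note: the agent-as-daemonset example binds hostPort on every node, which the default restricted SCC forbids, so step 0 creates a dedicated SecurityContextConstraints and step 1 grants it to the agent's ServiceAccount. The test's actual SCC manifest is not shown in the log; the fields below are an illustrative minimal sketch:

    oc apply -f - <<'EOF'
    apiVersion: security.openshift.io/v1
    kind: SecurityContextConstraints
    metadata:
      name: daemonset-with-hostport
    allowHostPorts: true          # the capability the restricted SCC denies
    runAsUser:
      type: RunAsAny
    seLinuxContext:
      type: RunAsAny
    fsGroup:
      type: RunAsAny
    supplementalGroups:
      type: RunAsAny
    EOF
    # Bind it to the ServiceAccount (the same command the test runs):
    oc adm policy --namespace "$NAMESPACE" add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset
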
logger.go:42: 08:46:18 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_NAME=order ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JOB_NUMBER=00 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o find-service-00-job.yaml]
logger.go:42: 08:46:18 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c kubectl create -f find-service-00-job.yaml -n $NAMESPACE]
logger.go:42: 08:46:19 | examples-openshift-agent-as-daemonset/4-find-service | job.batch/00-find-service created
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset/4-find-service | test step completed 4-find-service
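
Note: with the SCC in place, the Jaeger CR switches the agent from per-pod sidecars to a node-level DaemonSet, and the find-service job then checks the query API for the "order" service emitted by the example app. A sketch using the documented Jaeger Operator field (the test's exact CR spec is not shown in the log; $TOKEN would come from the get-token.sh step):

    oc apply -n "$NAMESPACE" -f - <<'EOF'
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: agent-as-daemonset
    spec:
      agent:
        strategy: DaemonSet   # one shared agent per node instead of sidecars
    EOF
    # The find-service job effectively polls the Jaeger query API:
    curl -ks -H "Authorization: Bearer $TOKEN" \
      https://agent-as-daemonset-query/api/services
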
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | examples-openshift-agent-as-daemonset events from ns kuttl-test-stunning-whippet:
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:01 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt Binding Scheduled Successfully assigned kuttl-test-stunning-whippet/agent-as-daemonset-55d98f6694-qpngt to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:01 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-55d98f6694 SuccessfulCreate Created pod: agent-as-daemonset-55d98f6694-qpngt replicaset-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:01 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-pqkdv Binding Scheduled Successfully assigned kuttl-test-stunning-whippet/agent-as-daemonset-agent-daemonset-pqkdv to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:01 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xkmjq Binding Scheduled Successfully assigned kuttl-test-stunning-whippet/agent-as-daemonset-agent-daemonset-xkmjq to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:01 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z9fwm Binding Scheduled Successfully assigned kuttl-test-stunning-whippet/agent-as-daemonset-agent-daemonset-z9fwm to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:01 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-pqkdv daemonset-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:01 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-z9fwm daemonset-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:01 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-xkmjq daemonset-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:01 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-55d98f6694 to 1 deployment-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-pqkdv AddedInterface Add eth0 [10.128.2.73/23] from ovn-kubernetes
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-pqkdv.spec.containers{jaeger-agent-daemonset} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xkmjq AddedInterface Add eth0 [10.129.2.53/23] from ovn-kubernetes
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xkmjq.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xkmjq.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xkmjq.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z9fwm AddedInterface Add eth0 [10.131.0.54/23] from ovn-kubernetes
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z9fwm.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z9fwm.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:02 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-z9fwm.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:04 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt Binding Scheduled Successfully assigned kuttl-test-stunning-whippet/vertx-create-span-sidecar-6c569f6fc6-9mzvt to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:04 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt AddedInterface Add eth0 [10.128.2.74/23] from ovn-kubernetes
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:04 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:04 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6c569f6fc6 SuccessfulCreate Created pod: vertx-create-span-sidecar-6c569f6fc6-9mzvt replicaset-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:04 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6c569f6fc6 to 1 deployment-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:05 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-pqkdv.spec.containers{jaeger-agent-daemonset} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" in 2.736s (2.736s including waiting) kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:05 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-pqkdv.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:05 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-pqkdv.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:08 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.46s (3.46s including waiting) kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:08 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:08 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:13 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:13 +0000 UTC Normal Pod agent-as-daemonset-55d98f6694-qpngt.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:13 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-55d98f6694 SuccessfulDelete Deleted pod: agent-as-daemonset-55d98f6694-qpngt replicaset-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:13 +0000 UTC Normal Pod agent-as-daemonset-775cdf9969-lv4ql Binding Scheduled Successfully assigned kuttl-test-stunning-whippet/agent-as-daemonset-775cdf9969-lv4ql to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:13 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-775cdf9969 SuccessfulCreate Created pod: agent-as-daemonset-775cdf9969-lv4ql replicaset-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:13 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-55d98f6694 to 0 from 1 deployment-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:13 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-775cdf9969 to 1 deployment-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:14 +0000 UTC Normal Pod agent-as-daemonset-775cdf9969-lv4ql AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:14 +0000 UTC Normal Pod agent-as-daemonset-775cdf9969-lv4ql.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:14 +0000 UTC Normal Pod agent-as-daemonset-775cdf9969-lv4ql.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:14 +0000 UTC Normal Pod agent-as-daemonset-775cdf9969-lv4ql.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:14 +0000 UTC Normal Pod agent-as-daemonset-775cdf9969-lv4ql.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:14 +0000 UTC Normal Pod agent-as-daemonset-775cdf9969-lv4ql.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:14 +0000 UTC Normal Pod agent-as-daemonset-775cdf9969-lv4ql.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:16 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.74:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:16 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.74:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:18 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:19 +0000 UTC Normal Pod 00-find-service-sjhjg Binding Scheduled Successfully assigned kuttl-test-stunning-whippet/00-find-service-sjhjg to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:19 +0000 UTC Normal Pod 00-find-service-sjhjg AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:19 +0000 UTC Normal Pod 00-find-service-sjhjg.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:19 +0000 UTC Normal Pod 00-find-service-sjhjg.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:19 +0000 UTC Normal Pod 00-find-service-sjhjg.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:19 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-sjhjg job-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:19 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.74:8080/": read tcp 10.128.2.2:53738->10.128.2.74:8080: read: connection reset by peer kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:19 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.74:8080/": dial tcp 10.128.2.74:8080: connect: connection refused kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:19 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:30 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-9mzvt.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.74:8080/": read tcp 10.128.2.2:43916->10.128.2.74:8080: read: connection reset by peer kubelet
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | 2023-11-06 08:46:46 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller
logger.go:42: 08:46:47 | examples-openshift-agent-as-daemonset | Deleting namespace: kuttl-test-stunning-whippet
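
The Jaeger instance exercised above runs the agent as a DaemonSet rather than as per-pod sidecars, which is why one agent-daemonset pod lands on each of the three worker nodes in the events. A minimal sketch of such a CR, assuming the shape of the upstream agent-as-daemonset example (the manifest applied by the earlier install steps is not reproduced in this log):

kubectl apply -n $NAMESPACE -f - <<EOF
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: agent-as-daemonset
spec:
  agent:
    strategy: DaemonSet  # one jaeger-agent pod per node instead of a sidecar per application pod
EOF
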
=== CONT kuttl/harness/examples-collector-with-priority-class
logger.go:42: 08:46:53 | examples-collector-with-priority-class | Creating namespace: kuttl-test-rested-stud
logger.go:42: 08:46:53 | examples-collector-with-priority-class/0-install | starting test step 0-install
logger.go:42: 08:46:53 | examples-collector-with-priority-class/0-install | PriorityClass:/collector-high-priority created
logger.go:42: 08:46:53 | examples-collector-with-priority-class/0-install | Jaeger:kuttl-test-rested-stud/collector-with-high-priority created
logger.go:42: 08:46:59 | examples-collector-with-priority-class/0-install | test step completed 0-install
logger.go:42: 08:46:59 | examples-collector-with-priority-class/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:46:59 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE collector-with-high-priority /dev/null]
logger.go:42: 08:47:00 | examples-collector-with-priority-class/1-smoke-test | Warning: resource jaegers/collector-with-high-priority is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:47:07 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:47:07 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:47:07 | examples-collector-with-priority-class/1-smoke-test | job.batch/report-span created
logger.go:42: 08:47:07 | examples-collector-with-priority-class/1-smoke-test | job.batch/check-span created
logger.go:42: 08:47:18 | examples-collector-with-priority-class/1-smoke-test | test step completed 1-smoke-test
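
The smoke test used throughout these suites renders one manifest containing two Jobs: report-span submits a test span to the collector endpoint, and check-span polls the query API until that span is returned, so the step only completes once a span has made the full round trip. Condensed from the commands above (endpoints as logged; ASSERT_IMG omitted):

JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 \
JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 \
MOUNT_SECRET=e2e-test \
/tmp/jaeger-tests/bin/gomplate \
  -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template \
  -o smoke-test-job.yaml
kubectl apply -f smoke-test-job.yaml -n $NAMESPACE  # creates job.batch/report-span and job.batch/check-span
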
logger.go:42: 08:47:18 | examples-collector-with-priority-class | examples-collector-with-priority-class events from ns kuttl-test-rested-stud:
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:57 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht Binding Scheduled Successfully assigned kuttl-test-rested-stud/collector-with-high-priority-7bdc775db5-qx6ht to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:57 +0000 UTC Warning Pod collector-with-high-priority-7bdc775db5-qx6ht FailedMount MountVolume.SetUp failed for volume "collector-with-high-priority-ui-oauth-proxy-tls" : secret "collector-with-high-priority-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:57 +0000 UTC Warning Pod collector-with-high-priority-7bdc775db5-qx6ht FailedMount MountVolume.SetUp failed for volume "collector-with-high-priority-collector-tls-config-volume" : secret "collector-with-high-priority-collector-headless-tls" not found kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:57 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-7bdc775db5 SuccessfulCreate Created pod: collector-with-high-priority-7bdc775db5-qx6ht replicaset-controller
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:57 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-7bdc775db5 to 1 deployment-controller
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:58 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:58 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:58 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:58 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:58 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:58 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:46:58 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:03 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:03 +0000 UTC Normal Pod collector-with-high-priority-7bdc775db5-qx6ht.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:03 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-7bdc775db5 SuccessfulDelete Deleted pod: collector-with-high-priority-7bdc775db5-qx6ht replicaset-controller
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:03 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled down replica set collector-with-high-priority-7bdc775db5 to 0 from 1 deployment-controller
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:04 +0000 UTC Normal Pod collector-with-high-priority-5b774fcb6b-w8nlm Binding Scheduled Successfully assigned kuttl-test-rested-stud/collector-with-high-priority-5b774fcb6b-w8nlm to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:04 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-5b774fcb6b SuccessfulCreate Created pod: collector-with-high-priority-5b774fcb6b-w8nlm replicaset-controller
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:04 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-5b774fcb6b to 1 deployment-controller
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:05 +0000 UTC Normal Pod collector-with-high-priority-5b774fcb6b-w8nlm AddedInterface Add eth0 [10.129.2.56/23] from ovn-kubernetes
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:05 +0000 UTC Normal Pod collector-with-high-priority-5b774fcb6b-w8nlm.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:05 +0000 UTC Normal Pod collector-with-high-priority-5b774fcb6b-w8nlm.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:05 +0000 UTC Normal Pod collector-with-high-priority-5b774fcb6b-w8nlm.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:05 +0000 UTC Normal Pod collector-with-high-priority-5b774fcb6b-w8nlm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:05 +0000 UTC Normal Pod collector-with-high-priority-5b774fcb6b-w8nlm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:05 +0000 UTC Normal Pod collector-with-high-priority-5b774fcb6b-w8nlm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:07 +0000 UTC Normal Pod check-span-ms6zk Binding Scheduled Successfully assigned kuttl-test-rested-stud/check-span-ms6zk to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:07 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-ms6zk job-controller
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:07 +0000 UTC Normal Pod report-span-qrds4 Binding Scheduled Successfully assigned kuttl-test-rested-stud/report-span-qrds4 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:07 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-qrds4 job-controller
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:08 +0000 UTC Normal Pod check-span-ms6zk AddedInterface Add eth0 [10.131.0.56/23] from ovn-kubernetes
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:08 +0000 UTC Normal Pod check-span-ms6zk.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:08 +0000 UTC Normal Pod check-span-ms6zk.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:08 +0000 UTC Normal Pod check-span-ms6zk.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:08 +0000 UTC Normal Pod report-span-qrds4 AddedInterface Add eth0 [10.128.2.75/23] from ovn-kubernetes
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:08 +0000 UTC Normal Pod report-span-qrds4.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:08 +0000 UTC Normal Pod report-span-qrds4.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:08 +0000 UTC Normal Pod report-span-qrds4.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:47:18 | examples-collector-with-priority-class | 2023-11-06 08:47:18 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:47:18 | examples-collector-with-priority-class | Deleting namespace: kuttl-test-rested-stud
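
This example pairs a cluster-scoped PriorityClass (PriorityClass:/collector-high-priority above) with a Jaeger CR whose collector pods request that class. A sketch under that assumption; the priority value is illustrative and the CR field path is inferred from the test name rather than shown in the log:

kubectl apply -f - <<EOF
apiVersion: scheduling.k8s.io/v1
kind: PriorityClass
metadata:
  name: collector-high-priority
value: 1000000  # illustrative; pods with higher values are scheduled (and preempt) first
globalDefault: false
description: High priority for the Jaeger collector under test.
---
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: collector-with-high-priority
spec:
  collector:
    priorityClassName: collector-high-priority  # assumed field path, not confirmed by the log
EOF
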
=== CONT kuttl/harness/examples-all-in-one-with-options
logger.go:42: 08:47:30 | examples-all-in-one-with-options | Creating namespace: kuttl-test-sweeping-flea
logger.go:42: 08:47:30 | examples-all-in-one-with-options/0-install | starting test step 0-install
logger.go:42: 08:47:30 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-sweeping-flea/my-jaeger created
logger.go:42: 08:47:35 | examples-all-in-one-with-options/0-install | test step completed 0-install
logger.go:42: 08:47:35 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 08:47:35 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 08:47:37 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:47:43 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:47:44 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:47:44 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created
logger.go:42: 08:47:44 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created
logger.go:42: 08:47:55 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 08:47:55 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-sweeping-flea:
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb Binding Scheduled Successfully assigned kuttl-test-sweeping-flea/my-jaeger-d9bfb848b-hqhtb to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb AddedInterface Add eth0 [10.128.2.76/23] from ovn-kubernetes
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal ReplicaSet.apps my-jaeger-d9bfb848b SuccessfulCreate Created pod: my-jaeger-d9bfb848b-hqhtb replicaset-controller
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:34 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-d9bfb848b to 1 deployment-controller
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:41 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:41 +0000 UTC Normal Pod my-jaeger-d9bfb848b-hqhtb.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:41 +0000 UTC Normal ReplicaSet.apps my-jaeger-d9bfb848b SuccessfulDelete Deleted pod: my-jaeger-d9bfb848b-hqhtb replicaset-controller
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:41 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-d9bfb848b to 0 from 1 deployment-controller
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:42 +0000 UTC Normal Pod my-jaeger-55fb595f96-wzq8d Binding Scheduled Successfully assigned kuttl-test-sweeping-flea/my-jaeger-55fb595f96-wzq8d to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:42 +0000 UTC Normal Pod my-jaeger-55fb595f96-wzq8d AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:42 +0000 UTC Normal Pod my-jaeger-55fb595f96-wzq8d.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:42 +0000 UTC Normal Pod my-jaeger-55fb595f96-wzq8d.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:42 +0000 UTC Normal ReplicaSet.apps my-jaeger-55fb595f96 SuccessfulCreate Created pod: my-jaeger-55fb595f96-wzq8d replicaset-controller
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:42 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-55fb595f96 to 1 deployment-controller
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:43 +0000 UTC Normal Pod my-jaeger-55fb595f96-wzq8d.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:43 +0000 UTC Normal Pod my-jaeger-55fb595f96-wzq8d.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:43 +0000 UTC Normal Pod my-jaeger-55fb595f96-wzq8d.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:43 +0000 UTC Normal Pod my-jaeger-55fb595f96-wzq8d.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Pod check-span-bhmrp Binding Scheduled Successfully assigned kuttl-test-sweeping-flea/check-span-bhmrp to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Pod check-span-bhmrp AddedInterface Add eth0 [10.131.0.57/23] from ovn-kubernetes
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Pod check-span-bhmrp.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-bhmrp job-controller
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Pod report-span-4tzss Binding Scheduled Successfully assigned kuttl-test-sweeping-flea/report-span-4tzss to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Pod report-span-4tzss AddedInterface Add eth0 [10.128.2.77/23] from ovn-kubernetes
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Pod report-span-4tzss.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Pod report-span-4tzss.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Pod report-span-4tzss.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:44 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-4tzss job-controller
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:45 +0000 UTC Normal Pod check-span-bhmrp.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:45 +0000 UTC Normal Pod check-span-bhmrp.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 08:47:55 | examples-all-in-one-with-options | 2023-11-06 08:47:55 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 08:47:55 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-sweeping-flea
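
The my-jaeger instance in the test above runs the default allInOne strategy but passes extra command-line flags through the CR; the /jaeger suffix on JAEGER_QUERY_ENDPOINT reflects a non-default query base path. A sketch consistent with that behaviour, assuming the shape of the upstream all-in-one-with-options example (option values here are assumptions, not read from the log):

kubectl apply -f - <<EOF
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: my-jaeger
spec:
  strategy: allInOne
  allInOne:
    options:
      log-level: debug      # assumed; any jaeger-all-in-one flag can be passed through options
      query:
        base-path: /jaeger  # implied by the /jaeger suffix used by the smoke test
EOF
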
=== CONT kuttl/harness/examples-auto-provision-kafka
logger.go:42: 08:48:02 | examples-auto-provision-kafka | Creating namespace: kuttl-test-fun-ringtail
logger.go:42: 08:48:02 | examples-auto-provision-kafka/2-install | starting test step 2-install
logger.go:42: 08:48:02 | examples-auto-provision-kafka/2-install | Jaeger:kuttl-test-fun-ringtail/auto-provision-kafka created
logger.go:42: 08:49:10 | examples-auto-provision-kafka/2-install | test step completed 2-install
logger.go:42: 08:49:10 | examples-auto-provision-kafka/3- | starting test step 3-
logger.go:42: 08:49:40 | examples-auto-provision-kafka/3- | test step completed 3-
logger.go:42: 08:49:40 | examples-auto-provision-kafka/4- | starting test step 4-
logger.go:42: 08:50:13 | examples-auto-provision-kafka/4- | test step completed 4-
logger.go:42: 08:50:13 | examples-auto-provision-kafka/5- | starting test step 5-
logger.go:42: 08:50:21 | examples-auto-provision-kafka/5- | test step completed 5-
logger.go:42: 08:50:21 | examples-auto-provision-kafka/6-smoke-test | starting test step 6-smoke-test
logger.go:42: 08:50:21 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provision-kafka /dev/null]
logger.go:42: 08:50:22 | examples-auto-provision-kafka/6-smoke-test | Warning: resource jaegers/auto-provision-kafka is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:50:28 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 08:50:29 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 08:50:29 | examples-auto-provision-kafka/6-smoke-test | job.batch/report-span created
logger.go:42: 08:50:29 | examples-auto-provision-kafka/6-smoke-test | job.batch/check-span created
logger.go:42: 08:50:41 | examples-auto-provision-kafka/6-smoke-test | test step completed 6-smoke-test
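
The auto-provision-kafka test exercises the streaming strategy: because the CR does not point at an existing Kafka cluster, the operator asks the AMQ Streams (Strimzi) operator to provision one, which is why the events below show a zookeeper pod, a kafka broker and an entity operator coming up before the Jaeger collector, ingester and query pods. A minimal sketch of such a CR, assuming the shape of the upstream streaming examples (the actual manifest is not shown in the log):

kubectl apply -f - <<EOF
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: auto-provision-kafka
spec:
  strategy: streaming  # collector -> Kafka -> ingester -> storage, instead of writing directly
  storage:
    type: elasticsearch
    elasticsearch:
      nodeCount: 1  # single-node ES, consistent with the one elasticsearch-cdm pod in the events
EOF
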
logger.go:42: 08:50:41 | examples-auto-provision-kafka | examples-auto-provision-kafka events from ns kuttl-test-fun-ringtail:
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:07 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-d5886d646 to 1 deployment-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:08 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-d5886d646 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf replicaset-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:08 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf AddedInterface Add eth0 [10.129.2.58/23] from ovn-kubernetes
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:24 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfunringtailautoprovisionkafk-1-jr4sf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:36 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:36 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:36 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-fun-ringtail/data-auto-provision-kafka-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:36 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:40 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/auto-provision-kafka-zookeeper-0 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:40 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-e76040ac-830c-483a-9c55-a41f226f8b23 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:43 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-e76040ac-830c-483a-9c55-a41f226f8b23" attachdetach-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:50 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 AddedInterface Add eth0 [10.131.0.58/23] from ovn-kubernetes
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:50 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:50 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:48:50 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:11 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-kafka NoPods No matching pods found controllermanager
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:11 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:11 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:11 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-fun-ringtail/data-0-auto-provision-kafka-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:15 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/auto-provision-kafka-kafka-0 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:15 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-d7c65c4d-da5e-4c87-9ca3-ad3c20ce5e06 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:18 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d7c65c4d-da5e-4c87-9ca3-ad3c20ce5e06" attachdetach-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:20 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 AddedInterface Add eth0 [10.131.0.59/23] from ovn-kubernetes
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:20 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:20 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:20 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:41 +0000 UTC Normal Deployment.apps auto-provision-kafka-entity-operator ScalingReplicaSet Scaled up replica set auto-provision-kafka-entity-operator-6fccd6898c to 1 deployment-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/auto-provision-kafka-entity-operator-6fccd6898c-qmwpv to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv AddedInterface Add eth0 [10.128.2.78/23] from ovn-kubernetes
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{tls-sidecar} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:42 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-entity-operator-6fccd6898c SuccessfulCreate Created pod: auto-provision-kafka-entity-operator-6fccd6898c-qmwpv replicaset-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:51 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{tls-sidecar} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" in 8.843s (8.843s including waiting) kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:51 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:49:51 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:02 +0000 UTC Warning Pod auto-provision-kafka-entity-operator-6fccd6898c-qmwpv.spec.containers{topic-operator} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 500 kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:14 +0000 UTC Normal Pod auto-provision-kafka-collector-6c48d66d8b-zsm79 Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/auto-provision-kafka-collector-6c48d66d8b-zsm79 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:14 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-collector-6c48d66d8b SuccessfulCreate Created pod: auto-provision-kafka-collector-6c48d66d8b-zsm79 replicaset-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:14 +0000 UTC Normal Deployment.apps auto-provision-kafka-collector ScalingReplicaSet Scaled up replica set auto-provision-kafka-collector-6c48d66d8b to 1 deployment-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:14 +0000 UTC Normal Pod auto-provision-kafka-ingester-57f46dc674-7f7mt Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/auto-provision-kafka-ingester-57f46dc674-7f7mt to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:14 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-ingester-57f46dc674 SuccessfulCreate Created pod: auto-provision-kafka-ingester-57f46dc674-7f7mt replicaset-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:14 +0000 UTC Normal Deployment.apps auto-provision-kafka-ingester ScalingReplicaSet Scaled up replica set auto-provision-kafka-ingester-57f46dc674 to 1 deployment-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:14 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-6b7cfc5bc4 to 1 deployment-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Normal Pod auto-provision-kafka-collector-6c48d66d8b-zsm79 AddedInterface Add eth0 [10.131.0.60/23] from ovn-kubernetes
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Normal Pod auto-provision-kafka-collector-6c48d66d8b-zsm79.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Normal Pod auto-provision-kafka-collector-6c48d66d8b-zsm79.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Normal Pod auto-provision-kafka-collector-6c48d66d8b-zsm79.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Normal Pod auto-provision-kafka-ingester-57f46dc674-7f7mt AddedInterface Add eth0 [10.128.2.79/23] from ovn-kubernetes
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Normal Pod auto-provision-kafka-ingester-57f46dc674-7f7mt.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/auto-provision-kafka-query-6b7cfc5bc4-6kmrr to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Warning Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr FailedMount MountVolume.SetUp failed for volume "auto-provision-kafka-ui-oauth-proxy-tls" : secret "auto-provision-kafka-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr AddedInterface Add eth0 [10.131.0.61/23] from ovn-kubernetes
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:15 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-6b7cfc5bc4 SuccessfulCreate Created pod: auto-provision-kafka-query-6b7cfc5bc4-6kmrr replicaset-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:16 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:16 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:16 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:16 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:16 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:16 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:16 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:16 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:16 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:19 +0000 UTC Normal Pod auto-provision-kafka-ingester-57f46dc674-7f7mt.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" in 4.273s (4.273s including waiting) kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:19 +0000 UTC Normal Pod auto-provision-kafka-ingester-57f46dc674-7f7mt.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:19 +0000 UTC Normal Pod auto-provision-kafka-ingester-57f46dc674-7f7mt.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:25 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:25 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:25 +0000 UTC Normal Pod auto-provision-kafka-query-6b7cfc5bc4-6kmrr.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:25 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-6b7cfc5bc4 SuccessfulDelete Deleted pod: auto-provision-kafka-query-6b7cfc5bc4-6kmrr replicaset-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:25 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled down replica set auto-provision-kafka-query-6b7cfc5bc4 to 0 from 1 deployment-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:26 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/auto-provision-kafka-query-6598b6799d-xk2td to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:26 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-6598b6799d SuccessfulCreate Created pod: auto-provision-kafka-query-6598b6799d-xk2td replicaset-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:26 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-6598b6799d to 1 deployment-controller
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td AddedInterface Add eth0 [10.131.0.62/23] from ovn-kubernetes
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 08:50:41
| examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:27 +0000 UTC Normal Pod auto-provision-kafka-query-6598b6799d-xk2td.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:29 +0000 UTC Normal Pod check-span-4qt5m Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/check-span-4qt5m to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:29 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-4qt5m job-controller logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:29 +0000 UTC Normal Pod report-span-8xxkt Binding Scheduled Successfully assigned kuttl-test-fun-ringtail/report-span-8xxkt to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:29 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-8xxkt job-controller logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester 
FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Normal Pod check-span-4qt5m AddedInterface Add eth0 [10.128.2.81/23] from ovn-kubernetes logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Normal Pod check-span-4qt5m.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Normal Pod check-span-4qt5m.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Normal Pod check-span-4qt5m.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Normal Pod report-span-8xxkt AddedInterface Add eth0 [10.128.2.80/23] from ovn-kubernetes logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Normal Pod report-span-8xxkt.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Normal Pod report-span-8xxkt.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:30 +0000 UTC Normal Pod report-span-8xxkt.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:50:41 | examples-auto-provision-kafka | 2023-11-06 08:50:40 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:50:41 | examples-auto-provision-kafka | Deleting namespace: kuttl-test-fun-ringtail === CONT kuttl/harness/examples-agent-with-priority-class logger.go:42: 08:51:21 | examples-agent-with-priority-class | Creating namespace: kuttl-test-stunning-beagle logger.go:42: 08:51:21 | examples-agent-with-priority-class/0-install | starting test step 0-install logger.go:42: 08:51:21 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 08:51:21 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-stunning-beagle/jaeger-agent-daemonset created logger.go:42: 08:51:21 | examples-agent-with-priority-class/0-install | test step completed 0-install logger.go:42: 08:51:21 | examples-agent-with-priority-class/1-install | starting test step 1-install logger.go:42: 08:51:21 | examples-agent-with-priority-class/1-install | PriorityClass:/high-priority created logger.go:42: 08:51:21 | examples-agent-with-priority-class/1-install | Jaeger:kuttl-test-stunning-beagle/agent-as-daemonset created logger.go:42: 08:51:27 | examples-agent-with-priority-class/1-install | test step completed 1-install logger.go:42: 08:51:27 | examples-agent-with-priority-class/2-smoke-test | starting test step 2-smoke-test logger.go:42: 
08:51:27 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 08:51:29 | examples-agent-with-priority-class/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:51:35 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:51:35 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:51:36 | examples-agent-with-priority-class/2-smoke-test | job.batch/report-span created logger.go:42: 08:51:36 | examples-agent-with-priority-class/2-smoke-test | job.batch/check-span created logger.go:42: 08:58:36 | examples-agent-with-priority-class/2-smoke-test | test step failed 2-smoke-test case.go:364: failed in step 2-smoke-test case.go:366:
--- Job:kuttl-test-stunning-beagle/check-span
+++ Job:kuttl-test-stunning-beagle/check-span
@@ -1,8 +1,141 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
+  annotations:
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"check-span","namespace":"kuttl-test-stunning-beagle"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./query"],"env":[{"name":"SERVICE_NAME","value":"smoke-test-service"},{"name":"QUERY_HOST","value":"https://agent-as-daemonset-query:443"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: 9a6c36ed-69ff-472e-b19a-e7ddbd3638a5
+    batch.kubernetes.io/job-name: check-span
+    controller-uid: 9a6c36ed-69ff-472e-b19a-e7ddbd3638a5
+    job-name: check-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:parallelism: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"QUERY_HOST"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2023-11-06T08:51:36Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-11-06T08:56:55Z"
   name: check-span
   namespace: kuttl-test-stunning-beagle
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  parallelism: 1
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: 9a6c36ed-69ff-472e-b19a-e7ddbd3638a5
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: 9a6c36ed-69ff-472e-b19a-e7ddbd3638a5
+        batch.kubernetes.io/job-name: check-span
+        controller-uid: 9a6c36ed-69ff-472e-b19a-e7ddbd3638a5
+        job-name: check-span
+    spec:
+      containers:
+      - command:
+        - ./query
+        env:
+        - name: SERVICE_NAME
+          value: smoke-test-service
+        - name: QUERY_HOST
+          value: https://agent-as-daemonset-query:443
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d
+        imagePullPolicy: IfNotPresent
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
 status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2023-11-06T08:51:36Z"
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-stunning-beagle/check-span: .status.succeeded: key is missing from map logger.go:42: 08:58:36 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns kuttl-test-stunning-beagle: logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:25 +0000 UTC Normal Pod agent-as-daemonset-6bc5f4cc45-dmjv7 Binding Scheduled Successfully assigned kuttl-test-stunning-beagle/agent-as-daemonset-6bc5f4cc45-dmjv7 to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:25 +0000 UTC Warning Pod agent-as-daemonset-6bc5f4cc45-dmjv7 FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-ui-oauth-proxy-tls" : secret "agent-as-daemonset-ui-oauth-proxy-tls" not found kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:25 +0000 UTC Warning Pod agent-as-daemonset-6bc5f4cc45-dmjv7 FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-collector-tls-config-volume" : secret "agent-as-daemonset-collector-headless-tls" not found kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:25 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-6bc5f4cc45 SuccessfulCreate Created pod: agent-as-daemonset-6bc5f4cc45-dmjv7
replicaset-controller logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:25 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-6bc5f4cc45 to 1 deployment-controller logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:26 +0000 UTC Normal Pod agent-as-daemonset-6bc5f4cc45-dmjv7 AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:26 +0000 UTC Normal Pod agent-as-daemonset-6bc5f4cc45-dmjv7.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:26 +0000 UTC Normal Pod agent-as-daemonset-6bc5f4cc45-dmjv7.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:26 +0000 UTC Normal Pod agent-as-daemonset-6bc5f4cc45-dmjv7.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:26 +0000 UTC Normal Pod agent-as-daemonset-6bc5f4cc45-dmjv7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:26 +0000 UTC Normal Pod agent-as-daemonset-6bc5f4cc45-dmjv7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:26 +0000 UTC Normal Pod agent-as-daemonset-6bc5f4cc45-dmjv7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:27 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not 
usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod check-span-pc45d Binding Scheduled Successfully assigned kuttl-test-stunning-beagle/check-span-pc45d to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod check-span-pc45d AddedInterface Add eth0 [10.131.0.63/23] from ovn-kubernetes logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod check-span-pc45d.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod check-span-pc45d.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod check-span-pc45d.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-pc45d job-controller logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod report-span-llhsm Binding Scheduled Successfully assigned kuttl-test-stunning-beagle/report-span-llhsm to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod report-span-llhsm AddedInterface Add eth0 [10.128.2.82/23] from ovn-kubernetes logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod report-span-llhsm.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod report-span-llhsm.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Pod report-span-llhsm.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:51:36 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-llhsm job-controller logger.go:42: 08:58:36 | examples-agent-with-priority-class | 2023-11-06 08:56:41 +0000 UTC Warning Pod check-span-pc45d.spec.containers{asserts-container} BackOff Back-off restarting failed container asserts-container in pod check-span-pc45d_kuttl-test-stunning-beagle(48774d37-99b1-47fd-9cf0-4d84f7949864) kubelet logger.go:42: 08:58:36 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-stunning-beagle === CONT kuttl/harness/examples-agent-as-daemonset 
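The failure above is an admission problem, not a Jaeger one: the agent DaemonSet requests hostPorts 5775/5778/6831/6832/14271, and every SCC reachable by its service account either forbids host ports or is "not usable by user or serviceaccount" -- including daemonset-with-hostport, the SCC that step 0 itself created. With no agent pod ever scheduled, report-span has nowhere to deliver spans, check-span backs off repeatedly, and the kuttl assert never sees status.succeeded: 1 on the Job, so the step fails 420 seconds after the jobs were created (08:51:36 to 08:58:36), matching the per-step timeout the harness reports elsewhere in this log. A sketch of the step that appears to be missing, assuming the SCC already sets allowHostPorts: true; the namespace is the generated one from this run, and the fixtures would normally achieve the same thing through the SCC's users list:

# Grant the SCC created in step 0 to the DaemonSet's service account
# so pods requesting host ports pass SCC validation (illustrative).
oc adm policy add-scc-to-user daemonset-with-hostport \
  -z jaeger-agent-daemonset \
  -n kuttl-test-stunning-beagle

# Confirm the SCC allows host ports and now covers the service account:
oc get scc daemonset-with-hostport -o yaml | grep -E 'allowHostPorts|jaeger-agent-daemonset'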
logger.go:42: 08:58:42 | examples-agent-as-daemonset | Creating namespace: kuttl-test-workable-lacewing logger.go:42: 08:58:42 | examples-agent-as-daemonset/0-install | starting test step 0-install logger.go:42: 08:58:43 | examples-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 08:58:43 | examples-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-workable-lacewing/jaeger-agent-daemonset created logger.go:42: 08:58:43 | examples-agent-as-daemonset/0-install | test step completed 0-install logger.go:42: 08:58:43 | examples-agent-as-daemonset/1-install | starting test step 1-install logger.go:42: 08:58:43 | examples-agent-as-daemonset/1-install | Jaeger:kuttl-test-workable-lacewing/agent-as-daemonset created logger.go:42: 08:58:49 | examples-agent-as-daemonset/1-install | test step completed 1-install logger.go:42: 08:58:49 | examples-agent-as-daemonset/2-smoke-test | starting test step 2-smoke-test logger.go:42: 08:58:49 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 08:58:50 | examples-agent-as-daemonset/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:58:57 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 08:58:57 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 08:58:58 | examples-agent-as-daemonset/2-smoke-test | job.batch/report-span created logger.go:42: 08:58:58 | examples-agent-as-daemonset/2-smoke-test | job.batch/check-span created logger.go:42: 08:59:09 | examples-agent-as-daemonset/2-smoke-test | test step completed 2-smoke-test logger.go:42: 08:59:09 | examples-agent-as-daemonset | examples-agent-as-daemonset events from ns kuttl-test-workable-lacewing: logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:46 +0000 UTC Normal Pod agent-as-daemonset-5dcd596769-kf9q4 Binding Scheduled Successfully assigned kuttl-test-workable-lacewing/agent-as-daemonset-5dcd596769-kf9q4 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:46 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5dcd596769 SuccessfulCreate Created pod: agent-as-daemonset-5dcd596769-kf9q4 replicaset-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:46 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-5dcd596769 to 1 deployment-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:47 +0000 UTC Normal Pod agent-as-daemonset-5dcd596769-kf9q4 AddedInterface Add eth0 
[10.128.2.83/23] from ovn-kubernetes logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:47 +0000 UTC Normal Pod agent-as-daemonset-5dcd596769-kf9q4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:47 +0000 UTC Normal Pod agent-as-daemonset-5dcd596769-kf9q4.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:47 +0000 UTC Normal Pod agent-as-daemonset-5dcd596769-kf9q4.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:47 +0000 UTC Normal Pod agent-as-daemonset-5dcd596769-kf9q4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:47 +0000 UTC Normal Pod agent-as-daemonset-5dcd596769-kf9q4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:47 +0000 UTC Normal Pod agent-as-daemonset-5dcd596769-kf9q4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:48 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:54 +0000 UTC Normal Pod 
agent-as-daemonset-5dcd596769-kf9q4.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:54 +0000 UTC Normal Pod agent-as-daemonset-5dcd596769-kf9q4.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:54 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-5dcd596769 SuccessfulDelete Deleted pod: agent-as-daemonset-5dcd596769-kf9q4 replicaset-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:54 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-5dcd596769 to 0 from 1 deployment-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:55 +0000 UTC Normal Pod agent-as-daemonset-54758c696-7l8s9 Binding Scheduled Successfully assigned kuttl-test-workable-lacewing/agent-as-daemonset-54758c696-7l8s9 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:55 +0000 UTC Normal Pod agent-as-daemonset-54758c696-7l8s9 AddedInterface Add eth0 [10.128.2.84/23] from ovn-kubernetes logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:55 +0000 UTC Normal Pod agent-as-daemonset-54758c696-7l8s9.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:55 +0000 UTC Normal Pod agent-as-daemonset-54758c696-7l8s9.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:55 +0000 UTC Normal Pod agent-as-daemonset-54758c696-7l8s9.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:55 +0000 UTC Normal Pod agent-as-daemonset-54758c696-7l8s9.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:55 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-54758c696 SuccessfulCreate Created pod: agent-as-daemonset-54758c696-7l8s9 replicaset-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:55 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-54758c696 to 1 deployment-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:56 +0000 UTC Normal Pod agent-as-daemonset-54758c696-7l8s9.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:56 +0000 UTC Normal Pod agent-as-daemonset-54758c696-7l8s9.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Pod check-span-hk47h Binding Scheduled Successfully assigned kuttl-test-workable-lacewing/check-span-hk47h to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Pod check-span-hk47h AddedInterface Add 
eth0 [10.131.0.64/23] from ovn-kubernetes logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Pod check-span-hk47h.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Pod check-span-hk47h.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-hk47h job-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Pod report-span-xl225 Binding Scheduled Successfully assigned kuttl-test-workable-lacewing/report-span-xl225 to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Pod report-span-xl225 AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Pod report-span-xl225.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Pod report-span-xl225.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Pod report-span-xl225.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:58 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-xl225 job-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:58:59 +0000 UTC Normal Pod check-span-hk47h.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 08:59:09 | examples-agent-as-daemonset | 2023-11-06 08:59:09 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 08:59:09 | examples-agent-as-daemonset | Deleting namespace: kuttl-test-workable-lacewing === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- FAIL: kuttl (1322.12s) --- FAIL: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.75s) --- PASS: kuttl/harness/examples-service-types (53.71s) --- PASS: kuttl/harness/examples-with-sampling (93.57s) --- PASS: kuttl/harness/examples-with-cassandra (53.90s) --- PASS: kuttl/harness/examples-with-badger-and-volume (39.91s) --- PASS: kuttl/harness/examples-with-badger (37.41s) --- PASS: kuttl/harness/examples-simplest (38.31s) --- PASS: kuttl/harness/examples-simple-prod-with-volumes (63.94s) --- PASS: kuttl/harness/examples-simple-prod (71.11s) --- PASS: kuttl/harness/examples-business-application-injected-sidecar (39.59s) --- PASS: kuttl/harness/examples-openshift-with-htpasswd (21.83s) --- PASS: kuttl/harness/examples-openshift-agent-as-daemonset (60.41s) --- PASS: kuttl/harness/examples-collector-with-priority-class (37.11s) --- PASS: kuttl/harness/examples-all-in-one-with-options 
(31.66s) --- PASS: kuttl/harness/examples-auto-provision-kafka (199.54s) --- FAIL: kuttl/harness/examples-agent-with-priority-class (441.18s) --- PASS: kuttl/harness/examples-agent-as-daemonset (33.14s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml time="2023-11-06T08:59:17Z" level=debug msg="Setting a new name for the test suites" time="2023-11-06T08:59:17Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-11-06T08:59:17Z" level=debug msg="normalizing test case names" time="2023-11-06T08:59:17Z" level=debug msg="examples/artifacts -> examples_artifacts" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-with-badger-and-volume -> examples_examples_with_badger_and_volume" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-openshift-agent-as-daemonset -> examples_examples_openshift_agent_as_daemonset" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-collector-with-priority-class -> examples_examples_collector_with_priority_class" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-auto-provision-kafka -> examples_examples_auto_provision_kafka" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class" time="2023-11-06T08:59:17Z" level=debug msg="examples/examples-agent-as-daemonset -> examples_examples_agent_as_daemonset"
+---------------------------------------------------------+--------+
| NAME                                                    | RESULT |
+---------------------------------------------------------+--------+
| examples_artifacts                                      | passed |
| examples_examples_service_types                         | passed |
| examples_examples_with_sampling                         | passed |
| examples_examples_with_cassandra                        | passed |
| examples_examples_with_badger_and_volume                | passed |
| examples_examples_with_badger                           | passed |
| examples_examples_simplest                              | passed |
| examples_examples_simple_prod_with_volumes              | passed |
| examples_examples_simple_prod                           | passed |
| examples_examples_business_application_injected_sidecar | passed |
| examples_examples_openshift_with_htpasswd               | passed |
| examples_examples_openshift_agent_as_daemonset          | passed |
| examples_examples_collector_with_priority_class         | passed |
| examples_examples_all_in_one_with_options               | passed |
| examples_examples_auto_provision_kafka                  | passed |
| examples_examples_agent_with_priority_class             | failed |
| examples_examples_agent_as_daemonset                    | passed |
+---------------------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 1 -gt 0 ']' + count=1 + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true + '[' 3 -ne 3 ']' + test_suite_name=generate + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/generate.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-generate make[2]: Entering directory '/tmp/jaeger-tests' test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.27.0/operator-sdk_`go env GOOS`_`go env GOARCH` ./hack/install/install-golangci-lint.sh Installing golangci-lint golangci-lint 1.53.2 is installed already ./hack/install/install-goimports.sh Installing goimports Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12 >>>> Formatting code... ./.ci/format.sh >>>> Building... ./hack/install/install-dependencies.sh Installing go dependencies Try 0... go mod download GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.49.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2023-11-06T08:59:20Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.49.0"" -o "bin/jaeger-operator" main.go JAEGER_VERSION="1.49.0" ./tests/e2e/generate/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 45m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 45m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/generate ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 2 -ne 2 ']' + test_name=generate + message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/generate/_build + '[' _build '!=' _build ']' + rm -rf generate + warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m' WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running generate E2E tests' Running generate E2E tests + cd tests/e2e/generate/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1891468343 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 1 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === CONT kuttl/harness/artifacts logger.go:42: 08:59:47 | artifacts | Creating namespace: kuttl-test-master-parakeet logger.go:42: 08:59:47 | artifacts | artifacts events from ns kuttl-test-master-parakeet: logger.go:42: 08:59:47 | artifacts | Deleting namespace: kuttl-test-master-parakeet === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (6.32s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.26s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml time="2023-11-06T08:59:53Z" level=debug msg="Setting a new name for the test suites" time="2023-11-06T08:59:53Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-11-06T08:59:53Z" level=debug msg="normalizing test case names" time="2023-11-06T08:59:53Z" level=debug msg="generate/artifacts -> generate_artifacts"
+--------------------+--------+
| NAME               | RESULT |
+--------------------+--------+
| generate_artifacts | passed |
+--------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true + '[' 3 -ne 3 ']' + test_suite_name=miscellaneous + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/miscellaneous.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-miscellaneous make[2]: Entering directory '/tmp/jaeger-tests' SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 45m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 45m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/miscellaneous/render.sh ++ export SUITE_DIR=./tests/e2e/miscellaneous ++ SUITE_DIR=./tests/e2e/miscellaneous ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test cassandra-spark 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=cassandra-spark + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf cassandra-spark + warning 'cassandra-spark: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: cassandra-spark: Test not supported in OpenShift\e[0m' WAR: cassandra-spark: Test not supported in OpenShift + start_test collector-autoscale + '[' 1 -ne 1 ']' + test_name=collector-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m' Rendering files for test collector-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p collector-autoscale + cd collector-autoscale + jaeger_name=simple-prod + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + ELASTICSEARCH_NODECOUNT=1 + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml + kubectl api-versions + grep autoscaling/v2beta2 -q + rm ./04-assert.yaml + generate_otlp_e2e_tests http + test_protocol=http + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m' Rendering files for test collector-otlp-allinone-http + echo 
=========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale + '[' collector-autoscale '!=' _build ']' + cd .. + mkdir -p collector-otlp-allinone-http + cd collector-otlp-allinone-http + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger http true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m' Rendering files for test collector-otlp-production-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http + '[' collector-otlp-allinone-http '!=' _build ']' + cd .. 
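The collector-autoscale step above patches the rendered Jaeger CR with yq before kuttl applies it. A minimal re-creation of that patch sequence, reconstructed from the trace (the "200m" memory quantity is copied verbatim from the log; the polarity of the API-version check is an assumption, since the trace only shows that 04-assert.yaml was removed on this cluster):

# Enable HPA-driven scaling on the collector and cap it at two replicas.
yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml
yq e -i '.spec.collector.autoscale=true' 01-install.yaml
yq e -i '.spec.collector.minReplicas=1' 01-install.yaml
yq e -i '.spec.collector.maxReplicas=2' 01-install.yaml
# Keep only the assert file matching the autoscaling API this cluster serves;
# autoscaling/v2beta2 is removed in Kubernetes 1.26+, so 04-assert.yaml is dropped here.
if ! kubectl api-versions | grep -q autoscaling/v2beta2; then
    rm ./04-assert.yaml
fi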
+ mkdir -p collector-otlp-production-http + cd collector-otlp-production-http + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger http true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + generate_otlp_e2e_tests grpc + test_protocol=grpc + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m' Rendering files for test collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http + '[' collector-otlp-production-http '!=' _build ']' + cd .. 
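Both collector-otlp-*-http renders run through the same endpoint wiring in render_otlp_smoke_test. A minimal sketch of that selection logic exactly as the trace shows it (on OpenShift the query service sits behind the OAuth proxy, hence HTTPS on 443; OTLP listens on 4317 for gRPC and 4318 for HTTP):

jaeger=my-jaeger
reporting_protocol=http            # the grpc variants below select :4317 instead
reporting_port=:4318
[ "$reporting_protocol" = grpc ] && reporting_port=:4317
export JAEGER_QUERY_ENDPOINT="https://${jaeger}-query:443"
export OTEL_EXPORTER_OTLP_ENDPOINT="http://${jaeger}-collector-headless${reporting_port}"

The gomplate invocations then bake these endpoints into the numbered smoke-test step files.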
+ mkdir -p collector-otlp-allinone-grpc + cd collector-otlp-allinone-grpc + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m' Rendering files for test collector-otlp-production-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc + '[' collector-otlp-allinone-grpc '!=' _build ']' + cd .. 
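Every render step in these traces follows the same gomplate pattern: export a few variables, then expand a template into a numbered kuttl step file. A self-contained toy version of that pattern; the inline template below is purely illustrative and assumes nothing about the real templates under tests/templates/:

export JAEGER_NAME=my-jaeger
cat > /tmp/demo.yaml.template <<'EOF'
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: {{ env.Getenv "JAEGER_NAME" }}
EOF
gomplate -f /tmp/demo.yaml.template -o ./00-install.yaml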
+ mkdir -p collector-otlp-production-grpc + cd collector-otlp-production-grpc + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + '[' true = true ']' + skip_test istio 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=istio + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc + '[' collector-otlp-production-grpc '!=' _build ']' + cd .. 
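The render_install_jaeger traces above all walk the same deploy-mode ladder before picking a template. A sketch of that dispatch covering only the two branches this run exercises (allInOne and production_autoprovisioned); the production and production_cassandra template names never appear in the log, so they are omitted:

render_install_jaeger() {
    [ $# -ne 3 ] && return 1
    export JAEGER_NAME=$1
    local deploy_mode=$2 test_step=$3
    local tdir=/tmp/jaeger-tests/tests/templates
    case $deploy_mode in
        allInOne)
            gomplate -f $tdir/allinone-jaeger-install.yaml.template -o ./$test_step-install.yaml
            gomplate -f $tdir/allinone-jaeger-assert.yaml.template -o ./$test_step-assert.yaml ;;
        production_autoprovisioned)
            # OpenShift path: Elasticsearch is auto-provisioned by the operator.
            gomplate -f $tdir/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./$test_step-install.yaml
            gomplate -f $tdir/production-jaeger-assert.yaml.template -o ./$test_step-assert.yaml ;;
    esac
}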
+ rm -rf istio + warning 'istio: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m' WAR: istio: Test not supported in OpenShift + '[' true = true ']' + skip_test outside-cluster 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=outside-cluster + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf outside-cluster + warning 'outside-cluster: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m' WAR: outside-cluster: Test not supported in OpenShift + start_test set-custom-img + '[' 1 -ne 1 ']' + test_name=set-custom-img + echo =========================================================================== =========================================================================== + info 'Rendering files for test set-custom-img' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test set-custom-img\e[0m' Rendering files for test set-custom-img + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p set-custom-img + cd set-custom-img + jaeger_name=my-jaeger + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml + '[' true = true ']' + skip_test non-cluster-wide 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=non-cluster-wide + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img + '[' set-custom-img '!=' _build ']' + cd .. + rm -rf non-cluster-wide + warning 'non-cluster-wide: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m' WAR: non-cluster-wide: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running miscellaneous E2E tests' Running miscellaneous E2E tests + cd tests/e2e/miscellaneous/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1891468343 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
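skip_test, called above for cassandra-spark, istio, outside-cluster and non-cluster-wide, simply deletes the rendered test directory so kuttl never discovers it. A reconstruction from the trace (the helper presumably lives in the suite's shared shell functions):

skip_test() {
    [ $# -ne 2 ] && return 1
    local test_name=$1 message=$2
    # Climb back to _build if a previous start_test left us inside a test dir.
    [ "$(basename "$(pwd)")" != _build ] && cd ..
    rm -rf "$test_name"
    echo -e "\e[1;33mWAR: $test_name: $message\e[0m"
}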
harness.go:275: Successful connection to cluster at: https://api.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 7 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 09:00:04 | artifacts | Creating namespace: kuttl-test-proper-lemming logger.go:42: 09:00:04 | artifacts | artifacts events from ns kuttl-test-proper-lemming: logger.go:42: 09:00:04 | artifacts | Deleting namespace: kuttl-test-proper-lemming === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 09:00:10 | collector-otlp-production-grpc | Creating namespace: kuttl-test-sterling-arachnid logger.go:42: 09:00:10 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 09:00:10 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-sterling-arachnid/my-jaeger created logger.go:42: 09:00:46 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 09:00:46 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 09:00:46 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 09:00:47 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
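The rendered kuttl-test.yaml itself is never echoed into the log, but the harness lines pin down its essentials: tests discovered in ".", a 420-second per-step timeout, and an XML report requested via --report xml. A plausible minimal equivalent, written as a heredoc to stay in shell; the exact fields in the repo's kuttl-test.yaml.template may differ:

cat > kuttl-test.yaml <<'EOF'
apiVersion: kuttl.dev/v1beta1
kind: TestSuite
testDirs:
  - .
timeout: 420
artifactsDir: ./artifacts
EOF
KUBECONFIG=/tmp/kubeconfig-1891468343 kubectl-kuttl test --report xml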
logger.go:42: 09:00:53 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 09:00:54 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 09:00:54 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 09:00:54 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 09:01:13 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 09:01:13 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-sterling-arachnid: logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:15 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696b5c78c4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf replicaset-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf Binding Scheduled Successfully assigned kuttl-test-sterling-arachnid/elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:15 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696b5c78c4 to 1 deployment-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:16 +0000 UTC Warning Pod elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf AddedInterface Add eth0 [10.129.2.62/23] from ovn-kubernetes logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:17 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttlteststerlingarachnidmyjaeger-1-696bw6smf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:42 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:43 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-55ck7 Binding Scheduled Successfully assigned kuttl-test-sterling-arachnid/my-jaeger-collector-5489f5bd9b-55ck7 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:43 +0000 UTC Warning Pod my-jaeger-collector-5489f5bd9b-55ck7 FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-55ck7 replicaset-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:43 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7 Binding Scheduled Successfully assigned kuttl-test-sterling-arachnid/my-jaeger-query-6ffd4764c8-lvnw7 to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6ffd4764c8 SuccessfulCreate Created pod: my-jaeger-query-6ffd4764c8-lvnw7 replicaset-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:43 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6ffd4764c8 to 1 deployment-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:44 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-55ck7 AddedInterface Add eth0 [10.128.2.85/23] from ovn-kubernetes logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:44 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-55ck7.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:44 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-55ck7.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 09:01:13 | 
collector-otlp-production-grpc | 2023-11-06 09:00:44 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-55ck7.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:44 +0000 UTC Warning Pod my-jaeger-query-6ffd4764c8-lvnw7 FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : failed to sync secret cache: timed out waiting for the condition kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7 AddedInterface Add eth0 [10.131.0.65/23] from ovn-kubernetes logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:45 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:49 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:49 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:49 +0000 UTC Normal Pod my-jaeger-query-6ffd4764c8-lvnw7.spec.containers{oauth-proxy} Killing Stopping container 
oauth-proxy kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6ffd4764c8 SuccessfulDelete Deleted pod: my-jaeger-query-6ffd4764c8-lvnw7 replicaset-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:49 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8 Binding Scheduled Successfully assigned kuttl-test-sterling-arachnid/my-jaeger-query-7ff576d46f-bwtz8 to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7ff576d46f SuccessfulCreate Created pod: my-jaeger-query-7ff576d46f-bwtz8 replicaset-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:49 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-6ffd4764c8 to 0 from 1 deployment-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:49 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7ff576d46f to 1 deployment-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8 AddedInterface Add eth0 [10.131.0.66/23] from ovn-kubernetes logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod my-jaeger-query-7ff576d46f-bwtz8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:50 +0000 UTC Normal Pod 
my-jaeger-query-7ff576d46f-bwtz8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:54 +0000 UTC Normal Pod check-span-q5989 Binding Scheduled Successfully assigned kuttl-test-sterling-arachnid/check-span-q5989 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:54 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-q5989 job-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:54 +0000 UTC Normal Pod report-span-fwhn4 Binding Scheduled Successfully assigned kuttl-test-sterling-arachnid/report-span-fwhn4 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:54 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-fwhn4 job-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:55 +0000 UTC Normal Pod check-span-q5989 AddedInterface Add eth0 [10.128.2.87/23] from ovn-kubernetes logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:55 +0000 UTC Normal Pod check-span-q5989.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:55 +0000 UTC Normal Pod check-span-q5989.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:55 +0000 UTC Normal Pod check-span-q5989.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:55 +0000 UTC Normal Pod report-span-fwhn4 AddedInterface Add eth0 [10.128.2.86/23] from ovn-kubernetes logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:55 +0000 UTC Normal Pod report-span-fwhn4.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:55 +0000 UTC Normal Pod report-span-fwhn4.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:55 +0000 UTC Normal Pod report-span-fwhn4.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:00:58 +0000 UTC Warning 
HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:01:13 | collector-otlp-production-grpc | 2023-11-06 09:01:13 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 09:01:13 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-sterling-arachnid === CONT kuttl/harness/set-custom-img logger.go:42: 09:01:25 | set-custom-img | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:01:25 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:01:25 | set-custom-img | Creating namespace: kuttl-test-ideal-lynx logger.go:42: 09:01:25 | set-custom-img/1-install | starting test step 1-install logger.go:42: 09:01:25 | set-custom-img/1-install | Jaeger:kuttl-test-ideal-lynx/my-jaeger created logger.go:42: 09:02:04 | set-custom-img/1-install | test step completed 1-install logger.go:42: 09:02:04 | set-custom-img/2-install | starting test step 2-install logger.go:42: 09:02:04 | set-custom-img/2-install | Jaeger:kuttl-test-ideal-lynx/my-jaeger updated logger.go:42: 09:02:04 | set-custom-img/2-install | test step completed 2-install logger.go:42: 09:02:04 | set-custom-img/3-check-image | starting test step 3-check-image logger.go:42: 09:02:04 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh] logger.go:42: 09:02:04 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856 logger.go:42: 09:02:09 | set-custom-img/3-check-image | Collector image asserted properly! 
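check-collector-img.sh is not shown in the log, but its two output lines above imply a poll-until-match loop over the collector Deployment's image. A hedged reconstruction (the container index and the roughly 5-second cadence are assumptions inferred from the timestamps; the Deployment name my-jaeger-collector is taken from the events):

expected=test
while true; do
    has=$(kubectl get deployment my-jaeger-collector -n $NAMESPACE \
        -o jsonpath='{.spec.template.spec.containers[0].image}')
    [ "$has" = "$expected" ] && { echo 'Collector image asserted properly!'; break; }
    echo "Collector image mismatch. Expected: $expected. Has: $has"
    sleep 5
done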
logger.go:42: 09:02:09 | set-custom-img/3-check-image | test step completed 3-check-image logger.go:42: 09:02:09 | set-custom-img | set-custom-img events from ns kuttl-test-ideal-lynx: logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb Binding Scheduled Successfully assigned kuttl-test-ideal-lynx/elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:34 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:34 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb replicaset-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:34 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestideallynxmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd to 1 deployment-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb AddedInterface Add eth0 [10.129.2.63/23] from ovn-kubernetes logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:45 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:01:50 +0000 UTC Warning Pod 
elasticsearch-cdm-kuttltestideallynxmyjaeger-1-674f9bcfbd-c2vhb.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-98whz Binding Scheduled Successfully assigned kuttl-test-ideal-lynx/my-jaeger-collector-5489f5bd9b-98whz to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Warning Pod my-jaeger-collector-5489f5bd9b-98whz FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-98whz replicaset-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8 Binding Scheduled Successfully assigned kuttl-test-ideal-lynx/my-jaeger-query-5689cf577b-dcmm8 to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8 AddedInterface Add eth0 [10.131.0.67/23] from ovn-kubernetes logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod my-jaeger-query-5689cf577b-dcmm8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Pod 
my-jaeger-query-5689cf577b-dcmm8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5689cf577b SuccessfulCreate Created pod: my-jaeger-query-5689cf577b-dcmm8 replicaset-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:01 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5689cf577b to 1 deployment-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:02 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-98whz AddedInterface Add eth0 [10.128.2.88/23] from ovn-kubernetes logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:02 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-98whz.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:02 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-98whz.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:02 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-98whz.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:05 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-98whz.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:05 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulDelete Deleted pod: my-jaeger-collector-5489f5bd9b-98whz replicaset-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:05 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-5489f5bd9b to 0 from 1 deployment-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:06 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-jbs28 Binding Scheduled Successfully assigned kuttl-test-ideal-lynx/my-jaeger-collector-7fd96ccd44-jbs28 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:06 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7fd96ccd44 SuccessfulCreate Created pod: my-jaeger-collector-7fd96ccd44-jbs28 replicaset-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:06 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-7fd96ccd44 to 1 deployment-controller logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:07 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-jbs28 AddedInterface Add eth0 [10.128.2.89/23] from ovn-kubernetes logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:07 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-jbs28.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:07 +0000 UTC Warning Pod my-jaeger-collector-7fd96ccd44-jbs28.spec.containers{jaeger-collector} Failed Failed to pull image "test": reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:07 +0000 UTC Warning Pod 
my-jaeger-collector-7fd96ccd44-jbs28.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:07 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-jbs28.spec.containers{jaeger-collector} BackOff Back-off pulling image "test" kubelet logger.go:42: 09:02:09 | set-custom-img | 2023-11-06 09:02:07 +0000 UTC Warning Pod my-jaeger-collector-7fd96ccd44-jbs28.spec.containers{jaeger-collector} Failed Error: ImagePullBackOff kubelet logger.go:42: 09:02:09 | set-custom-img | Deleting namespace: kuttl-test-ideal-lynx === CONT kuttl/harness/collector-otlp-production-http logger.go:42: 09:02:15 | collector-otlp-production-http | Creating namespace: kuttl-test-electric-finch logger.go:42: 09:02:15 | collector-otlp-production-http/1-install | starting test step 1-install logger.go:42: 09:02:15 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-electric-finch/my-jaeger created logger.go:42: 09:02:51 | collector-otlp-production-http/1-install | test step completed 1-install logger.go:42: 09:02:51 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test logger.go:42: 09:02:51 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 09:02:53 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 09:02:59 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 09:02:59 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 09:03:00 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created logger.go:42: 09:03:00 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created logger.go:42: 09:03:11 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test logger.go:42: 09:03:11 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-electric-finch: logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:22 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bdd5b6 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng replicaset-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng Binding Scheduled Successfully assigned kuttl-test-electric-finch/elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:03:11 | collector-otlp-production-http | 
2023-11-06 09:02:22 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bdd5b6 to 1 deployment-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:23 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng AddedInterface Add eth0 [10.129.2.64/23] from ovn-kubernetes logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:23 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:39 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestelectricfinchmyjaeger-1-7d65bddxzhng.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:49 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-7rthr Binding Scheduled Successfully assigned kuttl-test-electric-finch/my-jaeger-collector-5489f5bd9b-7rthr to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-7rthr replicaset-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:49 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:49 +0000 UTC Normal Pod 
my-jaeger-query-659f5bb458-p84wm Binding Scheduled Successfully assigned kuttl-test-electric-finch/my-jaeger-query-659f5bb458-p84wm to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-659f5bb458 SuccessfulCreate Created pod: my-jaeger-query-659f5bb458-p84wm replicaset-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:49 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-659f5bb458 to 1 deployment-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-7rthr AddedInterface Add eth0 [10.128.2.90/23] from ovn-kubernetes logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-7rthr.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-7rthr.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-7rthr.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm AddedInterface Add eth0 [10.131.0.68/23] from ovn-kubernetes logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{jaeger-agent} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:50 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:54 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:54 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:54 +0000 UTC Normal Pod my-jaeger-query-659f5bb458-p84wm.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:54 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-659f5bb458 SuccessfulDelete Deleted pod: my-jaeger-query-659f5bb458-p84wm replicaset-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:54 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-659f5bb458 to 0 from 1 deployment-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:55 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt Binding Scheduled Successfully assigned kuttl-test-electric-finch/my-jaeger-query-578d6d44cd-vhspt to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:55 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-578d6d44cd SuccessfulCreate Created pod: my-jaeger-query-578d6d44cd-vhspt replicaset-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:55 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-578d6d44cd to 1 deployment-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt AddedInterface Add eth0 [10.131.0.69/23] from ovn-kubernetes logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt.spec.containers{oauth-proxy} Pulled Container image 
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:02:56 +0000 UTC Normal Pod my-jaeger-query-578d6d44cd-vhspt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod check-span-k9rq5 Binding Scheduled Successfully assigned kuttl-test-electric-finch/check-span-k9rq5 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod check-span-k9rq5 AddedInterface Add eth0 [10.128.2.92/23] from ovn-kubernetes logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod check-span-k9rq5.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod check-span-k9rq5.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod check-span-k9rq5.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-k9rq5 job-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod report-span-25gjd Binding Scheduled Successfully assigned kuttl-test-electric-finch/report-span-25gjd to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod report-span-25gjd AddedInterface Add eth0 [10.128.2.91/23] from ovn-kubernetes logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod report-span-25gjd.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod 
report-span-25gjd.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Pod report-span-25gjd.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:00 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-25gjd job-controller logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:04 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:04 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:04 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:03:11 | collector-otlp-production-http | 2023-11-06 09:03:10 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 09:03:11 | collector-otlp-production-http | Deleting namespace: kuttl-test-electric-finch === CONT kuttl/harness/collector-otlp-allinone-grpc logger.go:42: 09:03:23 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-deep-flea logger.go:42: 09:03:23 | collector-otlp-allinone-grpc/0-install | starting test step 0-install logger.go:42: 09:03:23 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-deep-flea/my-jaeger created logger.go:42: 09:03:29 | collector-otlp-allinone-grpc/0-install | test step completed 0-install logger.go:42: 09:03:29 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test logger.go:42: 09:03:29 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 09:03:30 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
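
The warning just above is kubectl's standard complaint when kubectl apply first touches a resource that was created imperatively: the Jaeger instance was created without --save-config, so it carries no kubectl.kubernetes.io/last-applied-configuration annotation, and the first apply (presumably issued from inside get-token.sh while wiring up the test service account) records one on the fly. A minimal sketch of the same sequence, assuming a hypothetical manifest file jaeger.yaml:

    kubectl create -f jaeger.yaml                 # imperative create: no last-applied-configuration is stored
    kubectl apply -f jaeger.yaml                  # first apply prints the warning above, then patches the annotation in
    # creating with --save-config records the annotation up front, so apply never warns:
    kubectl create -f jaeger.yaml --save-config

As the message itself says, the annotation is patched automatically, so the warning is benign and the smoke test continues.
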
logger.go:42: 09:03:36 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 09:03:37 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 09:03:37 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created logger.go:42: 09:03:37 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created logger.go:42: 09:03:56 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-deep-flea: logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:26 +0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p Binding Scheduled Successfully assigned kuttl-test-deep-flea/my-jaeger-85fb5b5f98-wrc9p to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:26 +0000 UTC Warning Pod my-jaeger-85fb5b5f98-wrc9p FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-85fb5b5f98 SuccessfulCreate Created pod: my-jaeger-85fb5b5f98-wrc9p replicaset-controller logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:26 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-85fb5b5f98 to 1 deployment-controller logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:27 +0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p AddedInterface Add eth0 [10.129.2.65/23] from ovn-kubernetes logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:27 +0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:27 +0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:27 +0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:27 +0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:27 +0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:27 
+0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:33 +0000 UTC Normal Pod my-jaeger-78cb469f5d-g4z76 Binding Scheduled Successfully assigned kuttl-test-deep-flea/my-jaeger-78cb469f5d-g4z76 to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:33 +0000 UTC Normal ReplicaSet.apps my-jaeger-78cb469f5d SuccessfulCreate Created pod: my-jaeger-78cb469f5d-g4z76 replicaset-controller logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:33 +0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:33 +0000 UTC Normal Pod my-jaeger-85fb5b5f98-wrc9p.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:33 +0000 UTC Normal ReplicaSet.apps my-jaeger-85fb5b5f98 SuccessfulDelete Deleted pod: my-jaeger-85fb5b5f98-wrc9p replicaset-controller logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:33 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-85fb5b5f98 to 0 from 1 deployment-controller logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:33 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-78cb469f5d to 1 deployment-controller logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:34 +0000 UTC Normal Pod my-jaeger-78cb469f5d-g4z76 AddedInterface Add eth0 [10.128.2.93/23] from ovn-kubernetes logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:34 +0000 UTC Normal Pod my-jaeger-78cb469f5d-g4z76.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:34 +0000 UTC Normal Pod my-jaeger-78cb469f5d-g4z76.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:34 +0000 UTC Normal Pod my-jaeger-78cb469f5d-g4z76.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:34 +0000 UTC Normal Pod my-jaeger-78cb469f5d-g4z76.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:34 +0000 UTC Normal Pod my-jaeger-78cb469f5d-g4z76.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:34 +0000 UTC Normal Pod my-jaeger-78cb469f5d-g4z76.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Pod check-span-sbtg8 Binding Scheduled Successfully assigned kuttl-test-deep-flea/check-span-sbtg8 to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 
+0000 UTC Normal Pod check-span-sbtg8 AddedInterface Add eth0 [10.131.0.70/23] from ovn-kubernetes logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Pod check-span-sbtg8.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Pod check-span-sbtg8.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Pod check-span-sbtg8.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-sbtg8 job-controller logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Pod report-span-8pb22 Binding Scheduled Successfully assigned kuttl-test-deep-flea/report-span-8pb22 to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Pod report-span-8pb22 AddedInterface Add eth0 [10.129.2.66/23] from ovn-kubernetes logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Pod report-span-8pb22.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Pod report-span-8pb22.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Pod report-span-8pb22.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:37 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-8pb22 job-controller logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | 2023-11-06 09:03:55 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 09:03:56 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-deep-flea === CONT kuttl/harness/collector-otlp-allinone-http logger.go:42: 09:04:02 | collector-otlp-allinone-http | Creating namespace: kuttl-test-clever-akita logger.go:42: 09:04:02 | collector-otlp-allinone-http/0-install | starting test step 0-install logger.go:42: 09:04:03 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-clever-akita/my-jaeger created logger.go:42: 09:04:09 | collector-otlp-allinone-http/0-install | test step completed 0-install logger.go:42: 09:04:09 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test logger.go:42: 09:04:09 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 09:04:10 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. 
kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 09:04:16 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 09:04:16 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 09:04:17 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created logger.go:42: 09:04:17 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created logger.go:42: 09:04:28 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test logger.go:42: 09:04:28 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-clever-akita: logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:06 +0000 UTC Normal Pod my-jaeger-c689b7975-59bmx Binding Scheduled Successfully assigned kuttl-test-clever-akita/my-jaeger-c689b7975-59bmx to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:06 +0000 UTC Warning Pod my-jaeger-c689b7975-59bmx FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : secret "my-jaeger-ui-oauth-proxy-tls" not found kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:06 +0000 UTC Normal ReplicaSet.apps my-jaeger-c689b7975 SuccessfulCreate Created pod: my-jaeger-c689b7975-59bmx replicaset-controller logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:06 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-c689b7975 to 1 deployment-controller logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:07 +0000 UTC Normal Pod my-jaeger-c689b7975-59bmx AddedInterface Add eth0 [10.129.2.67/23] from ovn-kubernetes logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:07 +0000 UTC Normal Pod my-jaeger-c689b7975-59bmx.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:07 +0000 UTC Normal Pod my-jaeger-c689b7975-59bmx.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:07 +0000 UTC Normal Pod my-jaeger-c689b7975-59bmx.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:07 +0000 UTC Normal Pod my-jaeger-c689b7975-59bmx.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:08 +0000 UTC Normal Pod 
my-jaeger-c689b7975-59bmx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:08 +0000 UTC Normal Pod my-jaeger-c689b7975-59bmx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:11 +0000 UTC Normal Pod my-jaeger-c689b7975-59bmx.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:11 +0000 UTC Normal ReplicaSet.apps my-jaeger-c689b7975 SuccessfulDelete Deleted pod: my-jaeger-c689b7975-59bmx replicaset-controller logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:11 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-c689b7975 to 0 from 1 deployment-controller logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:12 +0000 UTC Normal Pod my-jaeger-c689b7975-59bmx.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal Pod my-jaeger-75c8b54dbb-82wln Binding Scheduled Successfully assigned kuttl-test-clever-akita/my-jaeger-75c8b54dbb-82wln to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal Pod my-jaeger-75c8b54dbb-82wln AddedInterface Add eth0 [10.128.2.94/23] from ovn-kubernetes logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal Pod my-jaeger-75c8b54dbb-82wln.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal Pod my-jaeger-75c8b54dbb-82wln.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal Pod my-jaeger-75c8b54dbb-82wln.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal Pod my-jaeger-75c8b54dbb-82wln.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal Pod my-jaeger-75c8b54dbb-82wln.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal Pod my-jaeger-75c8b54dbb-82wln.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal ReplicaSet.apps my-jaeger-75c8b54dbb SuccessfulCreate Created pod: my-jaeger-75c8b54dbb-82wln replicaset-controller logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:13 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-75c8b54dbb to 1 deployment-controller logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod check-span-tfrtm Binding Scheduled 
Successfully assigned kuttl-test-clever-akita/check-span-tfrtm to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod check-span-tfrtm AddedInterface Add eth0 [10.131.0.71/23] from ovn-kubernetes logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod check-span-tfrtm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod check-span-tfrtm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod check-span-tfrtm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-tfrtm job-controller logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod report-span-vfjqn Binding Scheduled Successfully assigned kuttl-test-clever-akita/report-span-vfjqn to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod report-span-vfjqn AddedInterface Add eth0 [10.129.2.68/23] from ovn-kubernetes logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod report-span-vfjqn.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod report-span-vfjqn.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Pod report-span-vfjqn.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:17 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-vfjqn job-controller logger.go:42: 09:04:28 | collector-otlp-allinone-http | 2023-11-06 09:04:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 09:04:28 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-clever-akita === CONT kuttl/harness/collector-autoscale logger.go:42: 09:04:40 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:04:40 | collector-autoscale | Ignoring wait-for-hpa.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:04:40 | collector-autoscale | Creating namespace: kuttl-test-assured-mollusk logger.go:42: 09:04:40 | collector-autoscale/1-install | starting test step 1-install logger.go:42: 09:04:40 | collector-autoscale/1-install | Jaeger:kuttl-test-assured-mollusk/simple-prod created logger.go:42: 09:05:15 | collector-autoscale/1-install | test step completed 1-install logger.go:42: 09:05:15 | collector-autoscale/2-wait-for-hpa | starting test step 
2-wait-for-hpa logger.go:42: 09:05:15 | collector-autoscale/2-wait-for-hpa | running command: [sh -c ./wait-for-hpa.sh] logger.go:42: 09:05:15 | collector-autoscale/2-wait-for-hpa | Some HPA metrics are not known yet logger.go:42: 09:05:16 | collector-autoscale/2-wait-for-hpa | test step completed 2-wait-for-hpa logger.go:42: 09:05:16 | collector-autoscale/3- | starting test step 3- logger.go:42: 09:05:16 | collector-autoscale/3- | test step completed 3- logger.go:42: 09:05:16 | collector-autoscale | collector-autoscale events from ns kuttl-test-assured-mollusk: logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:45 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6c9879d4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq replicaset-controller logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq Binding Scheduled Successfully assigned kuttl-test-assured-mollusk/elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:45 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6c9879d4 to 1 deployment-controller logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq AddedInterface Add eth0 [10.129.2.69/23] from ovn-kubernetes logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:46 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:04:56 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node 
is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:01 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestassuredmollusksimpleprod-1-6c6cpkckq.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:12 +0000 UTC Normal Pod simple-prod-collector-67686746b8-4wd6m Binding Scheduled Successfully assigned kuttl-test-assured-mollusk/simple-prod-collector-67686746b8-4wd6m to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:12 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-67686746b8 SuccessfulCreate Created pod: simple-prod-collector-67686746b8-4wd6m replicaset-controller logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:12 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-67686746b8 to 1 deployment-controller logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:12 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk Binding Scheduled Successfully assigned kuttl-test-assured-mollusk/simple-prod-query-6b785cc5f5-c4bnk to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:12 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6b785cc5f5 SuccessfulCreate Created pod: simple-prod-query-6b785cc5f5-c4bnk replicaset-controller logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:12 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6b785cc5f5 to 1 deployment-controller logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-collector-67686746b8-4wd6m AddedInterface Add eth0 [10.128.2.95/23] from ovn-kubernetes logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-collector-67686746b8-4wd6m.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-collector-67686746b8-4wd6m.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-collector-67686746b8-4wd6m.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk AddedInterface Add eth0 [10.131.0.72/23] from ovn-kubernetes logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk.spec.containers{jaeger-query} 
Started Started container jaeger-query kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:05:16 | collector-autoscale | 2023-11-06 09:05:13 +0000 UTC Normal Pod simple-prod-query-6b785cc5f5-c4bnk.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:05:16 | collector-autoscale | Deleting namespace: kuttl-test-assured-mollusk === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (318.68s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.78s) --- PASS: kuttl/harness/collector-otlp-production-grpc (75.53s) --- PASS: kuttl/harness/set-custom-img (49.93s) --- PASS: kuttl/harness/collector-otlp-production-http (67.50s) --- PASS: kuttl/harness/collector-otlp-allinone-grpc (39.95s) --- PASS: kuttl/harness/collector-otlp-allinone-http (37.15s) --- PASS: kuttl/harness/collector-autoscale (42.77s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml time="2023-11-06T09:05:23Z" level=debug msg="Setting a new name for the test suites" time="2023-11-06T09:05:23Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-11-06T09:05:23Z" level=debug msg="normalizing test case names" time="2023-11-06T09:05:23Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts" time="2023-11-06T09:05:23Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc" time="2023-11-06T09:05:23Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img" time="2023-11-06T09:05:23Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http" time="2023-11-06T09:05:23Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc" time="2023-11-06T09:05:23Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http" time="2023-11-06T09:05:23Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale" 
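
The debug lines above show junitcli's normalization pass before it writes the JUnit report: each kuttl case name is joined with the suite name, and slashes and dashes are rewritten to underscores. A rough shell equivalent of that renaming, as a sketch only (the authoritative mapping is whatever junitcli implements):

    echo "miscellaneous/collector-otlp-production-grpc" | tr '/-' '__'
    # -> miscellaneous_collector_otlp_production_grpc
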
+----------------------------------------------+--------+
|                     NAME                     | RESULT |
+----------------------------------------------+--------+
| miscellaneous_artifacts                      | passed |
| miscellaneous_collector_otlp_production_grpc | passed |
| miscellaneous_set_custom_img                 | passed |
| miscellaneous_collector_otlp_production_http | passed |
| miscellaneous_collector_otlp_allinone_grpc   | passed |
| miscellaneous_collector_otlp_allinone_http   | passed |
| miscellaneous_collector_autoscale            | passed |
+----------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true + '[' 3 -ne 3 ']' + test_suite_name=sidecar + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/sidecar.xml + cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-sidecar make[2]: Entering directory '/tmp/jaeger-tests' ./tests/e2e/sidecar/render.sh
+++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 51m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ++ IS_OPENSHIFT=false ++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 51m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/sidecar/render.sh ++ export SUITE_DIR=./tests/e2e/sidecar ++ SUITE_DIR=./tests/e2e/sidecar ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/sidecar ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + jaeger_service_name=order + start_test sidecar-deployment + '[' 1 -ne 1 ']' + test_name=sidecar-deployment + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-deployment' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m' Rendering files for test sidecar-deployment + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build + '[' _build '!=' _build ']' + mkdir -p sidecar-deployment + cd sidecar-deployment + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-namespace + '[' 1 -ne 1 ']' + test_name=sidecar-namespace + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-namespace' + '[' 1 -ne 1 ']' + 
echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m' Rendering files for test sidecar-namespace + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment + '[' sidecar-deployment '!=' _build ']' + cd .. + mkdir -p sidecar-namespace + cd sidecar-namespace + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-skip-webhook + '[' 1 -ne 1 ']' + test_name=sidecar-skip-webhook + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-skip-webhook' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m' Rendering files for test sidecar-skip-webhook + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace + '[' sidecar-namespace '!=' _build ']' + cd .. 
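# Each render_find_service call traced above is just gomplate consuming a handful of
# exported variables (JAEGER_NAME, SERVICE_NAME, JOB_NUMBER, JAEGER_QUERY_ENDPOINT);
# as a sketch, the same step could be re-rendered stand-alone with the values from
# the trace:
#   JAEGER_NAME=agent-as-sidecar JOB_NUMBER=00 SERVICE_NAME=order \
#   JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 \
#   /tmp/jaeger-tests/bin/gomplate \
#     -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template \
#     -o ./03-find-service.yaml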
+ mkdir -p sidecar-skip-webhook + cd sidecar-skip-webhook + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running sidecar E2E tests' Running sidecar E2E tests + cd tests/e2e/sidecar/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1891468343 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . has 4 tests
=== RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/sidecar-deployment === PAUSE kuttl/harness/sidecar-deployment === RUN kuttl/harness/sidecar-namespace === PAUSE kuttl/harness/sidecar-namespace === RUN kuttl/harness/sidecar-skip-webhook === PAUSE kuttl/harness/sidecar-skip-webhook === CONT kuttl/harness/sidecar-skip-webhook
logger.go:42: 09:05:30 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:05:30 | sidecar-skip-webhook | Creating namespace: kuttl-test-maximum-bug
logger.go:42: 09:05:30 | sidecar-skip-webhook/0-install | starting test step 0-install
logger.go:42: 09:05:30 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-maximum-bug/agent-as-sidecar created
logger.go:42: 09:05:36 | sidecar-skip-webhook/0-install | test step completed 0-install
logger.go:42: 09:05:36 | sidecar-skip-webhook/1-install | starting test step 1-install
logger.go:42: 09:05:36 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-maximum-bug/vertx-create-span-sidecar created
logger.go:42: 09:05:38 | sidecar-skip-webhook/1-install | test step completed 1-install
logger.go:42: 09:05:38 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label
logger.go:42: 09:05:38 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-maximum-bug]
logger.go:42: 09:05:39 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled
logger.go:42: 09:05:39 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-maximum-bug]
logger.go:42: 09:05:39 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotate
logger.go:42: 09:05:39 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label
logger.go:42: 09:05:39 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label
logger.go:42: 09:05:39 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-maximum-bug]
logger.go:42: 09:05:39 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled
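
The three steps above are the point of this test: the operator's sidecar webhook apparently skips any Deployment labeled app.kubernetes.io/name=jaeger-operator (so it never injects into what looks like the operator itself), even when the inject annotation is set; once the label is removed, injection goes ahead, which is why a second vertx pod carrying a jaeger-agent container shows up in the events below. The same sequence by hand, using the commands from the steps (NAMESPACE stands in for the kuttl test namespace):

    kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator -n $NAMESPACE
    kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true -n $NAMESPACE
    # no sidecar is injected while the label is present; dropping it unblocks the webhook:
    kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- -n $NAMESPACE
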
logger.go:42: 09:05:40 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label
logger.go:42: 09:05:40 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-maximum-bug:
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:34 +0000 UTC Normal Pod agent-as-sidecar-545b4666f6-54s2w Binding Scheduled Successfully assigned kuttl-test-maximum-bug/agent-as-sidecar-545b4666f6-54s2w to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:34 +0000 UTC Normal Pod agent-as-sidecar-545b4666f6-54s2w AddedInterface Add eth0 [10.129.2.70/23] from ovn-kubernetes
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:34 +0000 UTC Normal Pod agent-as-sidecar-545b4666f6-54s2w.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:34 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-545b4666f6 SuccessfulCreate Created pod: agent-as-sidecar-545b4666f6-54s2w replicaset-controller
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:34 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-545b4666f6 to 1 deployment-controller
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:35 +0000 UTC Normal Pod agent-as-sidecar-545b4666f6-54s2w.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:35 +0000 UTC Normal Pod agent-as-sidecar-545b4666f6-54s2w.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:36 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-lbdr9 Binding Scheduled Successfully assigned kuttl-test-maximum-bug/vertx-create-span-sidecar-84d458b68c-lbdr9 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:36 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-lbdr9 replicaset-controller
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:36 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:37 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-lbdr9 AddedInterface Add eth0 [10.128.2.96/23] from ovn-kubernetes
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:37 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-lbdr9.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:37 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-lbdr9.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:37 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-lbdr9.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal
Pod vertx-create-span-sidecar-77c6b7f786-69lr5 Binding Scheduled Successfully assigned kuttl-test-maximum-bug/vertx-create-span-sidecar-77c6b7f786-69lr5 to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal Pod vertx-create-span-sidecar-77c6b7f786-69lr5 AddedInterface Add eth0 [10.131.0.73/23] from ovn-kubernetes logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal Pod vertx-create-span-sidecar-77c6b7f786-69lr5.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal Pod vertx-create-span-sidecar-77c6b7f786-69lr5.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal Pod vertx-create-span-sidecar-77c6b7f786-69lr5.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal Pod vertx-create-span-sidecar-77c6b7f786-69lr5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal Pod vertx-create-span-sidecar-77c6b7f786-69lr5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal Pod vertx-create-span-sidecar-77c6b7f786-69lr5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-77c6b7f786 SuccessfulCreate Created pod: vertx-create-span-sidecar-77c6b7f786-69lr5 replicaset-controller logger.go:42: 09:05:40 | sidecar-skip-webhook | 2023-11-06 09:05:39 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-77c6b7f786 to 1 deployment-controller logger.go:42: 09:05:40 | sidecar-skip-webhook | Deleting namespace: kuttl-test-maximum-bug === CONT kuttl/harness/artifacts logger.go:42: 09:05:46 | artifacts | Creating namespace: kuttl-test-engaging-raccoon logger.go:42: 09:05:46 | artifacts | artifacts events from ns kuttl-test-engaging-raccoon: logger.go:42: 09:05:46 | artifacts | Deleting namespace: kuttl-test-engaging-raccoon === CONT kuttl/harness/sidecar-namespace logger.go:42: 09:05:52 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:05:52 | sidecar-namespace | Creating namespace: kuttl-test-main-dinosaur logger.go:42: 09:05:52 | sidecar-namespace/0-install | starting test step 0-install logger.go:42: 09:05:52 | sidecar-namespace/0-install | Jaeger:kuttl-test-main-dinosaur/agent-as-sidecar created logger.go:42: 09:05:58 | sidecar-namespace/0-install | test step completed 0-install logger.go:42: 09:05:58 | sidecar-namespace/1-install | starting test step 1-install logger.go:42: 09:05:58 | sidecar-namespace/1-install | Deployment:kuttl-test-main-dinosaur/vertx-create-span-sidecar created logger.go:42: 09:06:00 | 
sidecar-namespace/1-install | test step completed 1-install logger.go:42: 09:06:00 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection logger.go:42: 09:06:00 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"] logger.go:42: 09:06:00 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-main-dinosaur annotate logger.go:42: 09:06:01 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection logger.go:42: 09:06:01 | sidecar-namespace/3-find-service | starting test step 3-find-service logger.go:42: 09:06:01 | sidecar-namespace/3-find-service | Job:kuttl-test-main-dinosaur/00-find-service created logger.go:42: 09:06:12 | sidecar-namespace/3-find-service | test step completed 3-find-service logger.go:42: 09:06:12 | sidecar-namespace/4-other-instance | starting test step 4-other-instance logger.go:42: 09:06:12 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-main-dinosaur/agent-as-sidecar2 created logger.go:42: 09:06:19 | sidecar-namespace/4-other-instance | test step completed 4-other-instance logger.go:42: 09:06:19 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 09:06:19 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 09:06:19 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 09:06:19 | sidecar-namespace/6-find-service | Job:kuttl-test-main-dinosaur/01-find-service created logger.go:42: 09:06:38 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 09:06:38 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 09:06:38 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 09:06:38 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-main-dinosaur annotate logger.go:42: 09:06:40 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 09:06:40 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-main-dinosaur: logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:55 +0000 UTC Normal Pod agent-as-sidecar-7d876d6c6b-6w8tr Binding Scheduled Successfully assigned kuttl-test-main-dinosaur/agent-as-sidecar-7d876d6c6b-6w8tr to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:55 +0000 UTC Warning Pod agent-as-sidecar-7d876d6c6b-6w8tr FailedMount MountVolume.SetUp failed for volume "agent-as-sidecar-collector-tls-config-volume" : secret "agent-as-sidecar-collector-headless-tls" not found kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:55 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-7d876d6c6b SuccessfulCreate Created pod: agent-as-sidecar-7d876d6c6b-6w8tr replicaset-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:55 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-7d876d6c6b to 1 deployment-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:56 +0000 UTC Normal Pod agent-as-sidecar-7d876d6c6b-6w8tr AddedInterface Add eth0 [10.129.2.71/23] from ovn-kubernetes logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:56 +0000 UTC Normal Pod 
agent-as-sidecar-7d876d6c6b-6w8tr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:56 +0000 UTC Normal Pod agent-as-sidecar-7d876d6c6b-6w8tr.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:56 +0000 UTC Normal Pod agent-as-sidecar-7d876d6c6b-6w8tr.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:58 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-mk2hk Binding Scheduled Successfully assigned kuttl-test-main-dinosaur/vertx-create-span-sidecar-84d458b68c-mk2hk to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:58 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-mk2hk AddedInterface Add eth0 [10.128.2.97/23] from ovn-kubernetes logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:58 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-mk2hk.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:58 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-mk2hk.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:58 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-mk2hk.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:58 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-mk2hk replicaset-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:05:58 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:00 +0000 UTC Normal Pod vertx-create-span-sidecar-685fd77468-dbtcl Binding Scheduled Successfully assigned kuttl-test-main-dinosaur/vertx-create-span-sidecar-685fd77468-dbtcl to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:00 +0000 UTC Normal Pod vertx-create-span-sidecar-685fd77468-dbtcl AddedInterface Add eth0 [10.131.0.74/23] from ovn-kubernetes logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:00 +0000 UTC Normal Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:00 +0000 UTC Normal Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:00 +0000 UTC Normal Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:06:40 | 
sidecar-namespace | 2023-11-06 09:06:00 +0000 UTC Normal Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:00 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-685fd77468 SuccessfulCreate Created pod: vertx-create-span-sidecar-685fd77468-dbtcl replicaset-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:00 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-685fd77468 to 1 deployment-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:01 +0000 UTC Normal Pod 00-find-service-b8q9q Binding Scheduled Successfully assigned kuttl-test-main-dinosaur/00-find-service-b8q9q to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:01 +0000 UTC Normal Pod 00-find-service-b8q9q AddedInterface Add eth0 [10.129.2.72/23] from ovn-kubernetes logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:01 +0000 UTC Normal Pod 00-find-service-b8q9q.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:01 +0000 UTC Normal Pod 00-find-service-b8q9q.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:01 +0000 UTC Normal Pod 00-find-service-b8q9q.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:01 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-b8q9q job-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:01 +0000 UTC Normal Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:01 +0000 UTC Normal Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:06 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-mk2hk.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.97:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:06 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-mk2hk.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.97:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:08 +0000 UTC Warning Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.74:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:08 +0000 UTC Warning Pod 
vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.74:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:08 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-mk2hk.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:09 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-mk2hk.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.97:8080/": read tcp 10.128.2.2:50232->10.128.2.97:8080: read: connection reset by peer kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:09 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-mk2hk.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.97:8080/": dial tcp 10.128.2.97:8080: connect: connection refused kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:11 +0000 UTC Normal Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:11 +0000 UTC Warning Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.74:8080/": read tcp 10.131.0.2:38314->10.131.0.74:8080: read: connection reset by peer kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:11 +0000 UTC Warning Pod vertx-create-span-sidecar-685fd77468-dbtcl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.74:8080/": dial tcp 10.131.0.74:8080: connect: connection refused kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:12 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:18 +0000 UTC Normal Pod agent-as-sidecar2-656887f87d-x9wqh Binding Scheduled Successfully assigned kuttl-test-main-dinosaur/agent-as-sidecar2-656887f87d-x9wqh to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:18 +0000 UTC Normal Pod agent-as-sidecar2-656887f87d-x9wqh AddedInterface Add eth0 [10.129.2.73/23] from ovn-kubernetes logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:18 +0000 UTC Normal Pod agent-as-sidecar2-656887f87d-x9wqh.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:18 +0000 UTC Normal Pod agent-as-sidecar2-656887f87d-x9wqh.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:18 +0000 UTC Normal Pod agent-as-sidecar2-656887f87d-x9wqh.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:18 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-656887f87d SuccessfulCreate Created pod: agent-as-sidecar2-656887f87d-x9wqh replicaset-controller logger.go:42: 09:06:40 | 
sidecar-namespace | 2023-11-06 09:06:18 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-656887f87d to 1 deployment-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:19 +0000 UTC Normal Pod 01-find-service-ssrcd Binding Scheduled Successfully assigned kuttl-test-main-dinosaur/01-find-service-ssrcd to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:19 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-ssrcd job-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:19 +0000 UTC Normal Pod agent-as-sidecar-7d876d6c6b-6w8tr.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:20 +0000 UTC Normal Pod 01-find-service-ssrcd AddedInterface Add eth0 [10.128.2.98/23] from ovn-kubernetes logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:20 +0000 UTC Normal Pod 01-find-service-ssrcd.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:20 +0000 UTC Normal Pod 01-find-service-ssrcd.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:20 +0000 UTC Normal Pod 01-find-service-ssrcd.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:20 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-mk2hk.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.97:8080/": read tcp 10.128.2.2:44584->10.128.2.97:8080: read: connection reset by peer kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8 Binding Scheduled Successfully assigned kuttl-test-main-dinosaur/vertx-create-span-sidecar-65f5b4c7db-mlhn8 to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8 AddedInterface Add eth0 [10.129.2.74/23] from ovn-kubernetes logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on 
machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-65f5b4c7db SuccessfulCreate Created pod: vertx-create-span-sidecar-65f5b4c7db-mlhn8 replicaset-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-mk2hk replicaset-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:23 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-65f5b4c7db to 1 from 0 deployment-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:31 +0000 UTC Warning Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.74:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:31 +0000 UTC Warning Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.74:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:33 +0000 UTC Normal Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:33 +0000 UTC Warning Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.74:8080/": read tcp 10.129.2.2:42582->10.129.2.74:8080: read: connection reset by peer kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:33 +0000 UTC Warning Pod vertx-create-span-sidecar-65f5b4c7db-mlhn8.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.74:8080/": dial tcp 10.129.2.74:8080: connect: connection refused kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:38 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:38 +0000 UTC Normal Pod vertx-create-span-sidecar-56d958c7d5-2p6tj Binding Scheduled Successfully assigned kuttl-test-main-dinosaur/vertx-create-span-sidecar-56d958c7d5-2p6tj to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:38 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-56d958c7d5 SuccessfulCreate Created pod: vertx-create-span-sidecar-56d958c7d5-2p6tj 
replicaset-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:38 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-685fd77468 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-685fd77468-dbtcl replicaset-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:38 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-685fd77468 to 0 from 1 deployment-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:38 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-56d958c7d5 to 1 from 0 deployment-controller logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:39 +0000 UTC Normal Pod vertx-create-span-sidecar-56d958c7d5-2p6tj AddedInterface Add eth0 [10.128.2.99/23] from ovn-kubernetes logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:39 +0000 UTC Normal Pod vertx-create-span-sidecar-56d958c7d5-2p6tj.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:39 +0000 UTC Normal Pod vertx-create-span-sidecar-56d958c7d5-2p6tj.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:06:40 | sidecar-namespace | 2023-11-06 09:06:39 +0000 UTC Normal Pod vertx-create-span-sidecar-56d958c7d5-2p6tj.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:06:41 | sidecar-namespace | Deleting namespace: kuttl-test-main-dinosaur === CONT kuttl/harness/sidecar-deployment logger.go:42: 09:06:47 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:06:47 | sidecar-deployment | Creating namespace: kuttl-test-giving-baboon logger.go:42: 09:06:47 | sidecar-deployment/0-install | starting test step 0-install logger.go:42: 09:06:47 | sidecar-deployment/0-install | Jaeger:kuttl-test-giving-baboon/agent-as-sidecar created logger.go:42: 09:06:53 | sidecar-deployment/0-install | test step completed 0-install logger.go:42: 09:06:53 | sidecar-deployment/1-install | starting test step 1-install logger.go:42: 09:06:53 | sidecar-deployment/1-install | Deployment:kuttl-test-giving-baboon/vertx-create-span-sidecar created logger.go:42: 09:06:54 | sidecar-deployment/1-install | test step completed 1-install logger.go:42: 09:06:54 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection logger.go:42: 09:06:54 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-giving-baboon] logger.go:42: 09:06:54 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 09:06:56 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection logger.go:42: 09:06:56 | sidecar-deployment/3-find-service | starting test step 3-find-service logger.go:42: 09:06:56 | sidecar-deployment/3-find-service | Job:kuttl-test-giving-baboon/00-find-service created logger.go:42: 09:07:08 | sidecar-deployment/3-find-service | test step completed 3-find-service logger.go:42: 09:07:08 | sidecar-deployment/4-other-instance | starting test step 4-other-instance 
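
Note: the sidecar-namespace and sidecar-deployment tests interleaved above exercise the same injection mechanism at two scopes; the operator's webhook reacts to the sidecar.jaegertracing.io/inject annotation on either a single Deployment or a whole namespace. Distilled from the kubectl commands in the test steps (NAMESPACE stands for the kuttl-generated test namespace):

    # Per-deployment injection, as in sidecar-deployment/2-enable-injection:
    kubectl annotate --overwrite deployment vertx-create-span-sidecar \
        sidecar.jaegertracing.io/inject=true --namespace "$NAMESPACE"

    # Namespace-wide injection, as in sidecar-namespace/2-enable-injection:
    kubectl annotate --overwrite namespaces "$NAMESPACE" \
        sidecar.jaegertracing.io/inject=true

    # The 7-disable-injection steps flip the same annotation to false,
    # which removes the injected agent again.
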
logger.go:42: 09:07:08 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-giving-baboon/agent-as-sidecar2 created logger.go:42: 09:07:14 | sidecar-deployment/4-other-instance | test step completed 4-other-instance logger.go:42: 09:07:14 | sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 09:07:16 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 09:07:16 | sidecar-deployment/6-find-service | starting test step 6-find-service logger.go:42: 09:07:16 | sidecar-deployment/6-find-service | Job:kuttl-test-giving-baboon/01-find-service created logger.go:42: 09:07:36 | sidecar-deployment/6-find-service | test step completed 6-find-service logger.go:42: 09:07:36 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection logger.go:42: 09:07:36 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-giving-baboon] logger.go:42: 09:07:36 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 09:07:38 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection logger.go:42: 09:07:38 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-giving-baboon: logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:51 +0000 UTC Normal Pod agent-as-sidecar-5d498d989-v7vmj Binding Scheduled Successfully assigned kuttl-test-giving-baboon/agent-as-sidecar-5d498d989-v7vmj to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:51 +0000 UTC Normal Pod agent-as-sidecar-5d498d989-v7vmj AddedInterface Add eth0 [10.129.2.75/23] from ovn-kubernetes logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:51 +0000 UTC Normal Pod agent-as-sidecar-5d498d989-v7vmj.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:51 +0000 UTC Normal Pod agent-as-sidecar-5d498d989-v7vmj.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:51 +0000 UTC Normal Pod agent-as-sidecar-5d498d989-v7vmj.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:51 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-5d498d989 SuccessfulCreate Created pod: agent-as-sidecar-5d498d989-v7vmj replicaset-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:51 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-5d498d989 to 1 deployment-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:53 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-d2ncm Binding Scheduled Successfully assigned kuttl-test-giving-baboon/vertx-create-span-sidecar-84d458b68c-d2ncm to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:53 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-d2ncm replicaset-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 
09:06:53 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:54 +0000 UTC Normal Pod vertx-create-span-sidecar-68b899fb85-sjl24 Binding Scheduled Successfully assigned kuttl-test-giving-baboon/vertx-create-span-sidecar-68b899fb85-sjl24 to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:54 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-68b899fb85 SuccessfulCreate Created pod: vertx-create-span-sidecar-68b899fb85-sjl24 replicaset-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:54 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-d2ncm AddedInterface Add eth0 [10.128.2.100/23] from ovn-kubernetes logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:54 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-d2ncm.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:54 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-d2ncm.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:54 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-d2ncm.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:54 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-68b899fb85 to 1 deployment-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:55 +0000 UTC Normal Pod vertx-create-span-sidecar-68b899fb85-sjl24 AddedInterface Add eth0 [10.131.0.75/23] from ovn-kubernetes logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:55 +0000 UTC Normal Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:55 +0000 UTC Normal Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:55 +0000 UTC Normal Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:55 +0000 UTC Normal Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:55 +0000 UTC Normal Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:55 +0000 UTC Normal Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet 
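
Note: the event stream above is what a successful injection looks like: the webhook rewrites the pod template, the Deployment rolls to a new ReplicaSet, and each new pod starts a jaeger-agent container next to vertx-create-span-sidecar. A quick manual check for this state (a hypothetical convenience command, not something the suite runs) could be:

    # List container names in the pod template; after injection jaeger-agent
    # should appear alongside the application container.
    kubectl get deployment vertx-create-span-sidecar --namespace "$NAMESPACE" \
        -o jsonpath='{.spec.template.spec.containers[*].name}'
    # expected output: vertx-create-span-sidecar jaeger-agent
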
logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:56 +0000 UTC Normal Pod 00-find-service-b6nrr Binding Scheduled Successfully assigned kuttl-test-giving-baboon/00-find-service-b6nrr to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:56 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-b6nrr job-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:57 +0000 UTC Normal Pod 00-find-service-b6nrr AddedInterface Add eth0 [10.128.2.101/23] from ovn-kubernetes logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:57 +0000 UTC Normal Pod 00-find-service-b6nrr.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:57 +0000 UTC Normal Pod 00-find-service-b6nrr.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:06:57 +0000 UTC Normal Pod 00-find-service-b6nrr.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:03 +0000 UTC Warning Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.75:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:03 +0000 UTC Warning Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.75:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:03 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-d2ncm.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.100:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:03 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-d2ncm.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.100:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:05 +0000 UTC Normal Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:05 +0000 UTC Warning Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.75:8080/": read tcp 10.131.0.2:54912->10.131.0.75:8080: read: connection reset by peer kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:05 +0000 UTC Warning Pod vertx-create-span-sidecar-68b899fb85-sjl24.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.75:8080/": dial tcp 10.131.0.75:8080: connect: connection refused kubelet logger.go:42: 09:07:38 | sidecar-deployment | 
2023-11-06 09:07:05 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-d2ncm.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:05 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-d2ncm.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.100:8080/": read tcp 10.128.2.2:36210->10.128.2.100:8080: read: connection reset by peer kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:05 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-d2ncm.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.100:8080/": dial tcp 10.128.2.100:8080: connect: connection refused kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:08 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:12 +0000 UTC Normal Pod agent-as-sidecar2-565b997bc9-8g2jm Binding Scheduled Successfully assigned kuttl-test-giving-baboon/agent-as-sidecar2-565b997bc9-8g2jm to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:12 +0000 UTC Normal Pod agent-as-sidecar2-565b997bc9-8g2jm AddedInterface Add eth0 [10.128.2.102/23] from ovn-kubernetes logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:12 +0000 UTC Normal Pod agent-as-sidecar2-565b997bc9-8g2jm.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:12 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-565b997bc9 SuccessfulCreate Created pod: agent-as-sidecar2-565b997bc9-8g2jm replicaset-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:12 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-565b997bc9 to 1 deployment-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:13 +0000 UTC Normal Pod agent-as-sidecar2-565b997bc9-8g2jm.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:13 +0000 UTC Normal Pod agent-as-sidecar2-565b997bc9-8g2jm.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:15 +0000 UTC Normal Pod agent-as-sidecar-5d498d989-v7vmj.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:16 +0000 UTC Normal Pod 01-find-service-22xnw Binding Scheduled Successfully assigned kuttl-test-giving-baboon/01-find-service-22xnw to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:16 +0000 UTC Normal Pod 01-find-service-22xnw AddedInterface Add eth0 [10.129.2.76/23] from ovn-kubernetes logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:16 +0000 UTC Normal Pod 01-find-service-22xnw.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-4qrjzj65/pipeline@sha256:a3771327d94f4c3b35381e0847c031be677f0a6c94d4a8f8d3b4082c54239c9d" already present on machine kubelet logger.go:42: 09:07:38 | 
sidecar-deployment | 2023-11-06 09:07:16 +0000 UTC Normal Pod 01-find-service-22xnw.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:16 +0000 UTC Normal Pod 01-find-service-22xnw.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:16 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-22xnw job-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:16 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-d2ncm.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.100:8080/": read tcp 10.128.2.2:53316->10.128.2.100:8080: read: connection reset by peer kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Pod vertx-create-span-sidecar-556b94cc5-m7fxx Binding Scheduled Successfully assigned kuttl-test-giving-baboon/vertx-create-span-sidecar-556b94cc5-m7fxx to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Pod vertx-create-span-sidecar-556b94cc5-m7fxx AddedInterface Add eth0 [10.129.2.77/23] from ovn-kubernetes logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-556b94cc5 SuccessfulCreate Created pod: vertx-create-span-sidecar-556b94cc5-m7fxx replicaset-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-d2ncm replicaset-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 
09:07:38 | sidecar-deployment | 2023-11-06 09:07:18 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-556b94cc5 to 1 from 0 deployment-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:26 +0000 UTC Warning Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.77:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:26 +0000 UTC Warning Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.77:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:28 +0000 UTC Normal Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:29 +0000 UTC Warning Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.77:8080/": read tcp 10.129.2.2:44322->10.129.2.77:8080: read: connection reset by peer kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:29 +0000 UTC Warning Pod vertx-create-span-sidecar-556b94cc5-m7fxx.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.77:8080/": dial tcp 10.129.2.77:8080: connect: connection refused kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:35 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:36 +0000 UTC Normal Pod vertx-create-span-sidecar-5d85bfb966-ssr5x Binding Scheduled Successfully assigned kuttl-test-giving-baboon/vertx-create-span-sidecar-5d85bfb966-ssr5x to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:36 +0000 UTC Normal Pod vertx-create-span-sidecar-5d85bfb966-ssr5x AddedInterface Add eth0 [10.128.2.103/23] from ovn-kubernetes logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:36 +0000 UTC Normal Pod vertx-create-span-sidecar-5d85bfb966-ssr5x.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:36 +0000 UTC Normal Pod vertx-create-span-sidecar-5d85bfb966-ssr5x.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:36 +0000 UTC Normal Pod vertx-create-span-sidecar-5d85bfb966-ssr5x.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:36 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5d85bfb966 SuccessfulCreate Created pod: vertx-create-span-sidecar-5d85bfb966-ssr5x replicaset-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:36 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-68b899fb85 SuccessfulDelete Deleted pod: 
vertx-create-span-sidecar-68b899fb85-sjl24 replicaset-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:36 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-68b899fb85 to 0 from 1 deployment-controller logger.go:42: 09:07:38 | sidecar-deployment | 2023-11-06 09:07:36 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-5d85bfb966 to 1 from 0 deployment-controller logger.go:42: 09:07:38 | sidecar-deployment | Deleting namespace: kuttl-test-giving-baboon === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: ""
--- PASS: kuttl (134.09s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/sidecar-skip-webhook (15.45s)
--- PASS: kuttl/harness/artifacts (5.78s)
--- PASS: kuttl/harness/sidecar-namespace (55.31s)
--- PASS: kuttl/harness/sidecar-deployment (57.52s)
PASS
+ exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml time="2023-11-06T09:07:46Z" level=debug msg="Setting a new name for the test suites" time="2023-11-06T09:07:46Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-11-06T09:07:46Z" level=debug msg="normalizing test case names" time="2023-11-06T09:07:46Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook" time="2023-11-06T09:07:46Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts" time="2023-11-06T09:07:46Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace" time="2023-11-06T09:07:46Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment"
+------------------------------+--------+
|             NAME             | RESULT |
+------------------------------+--------+
| sidecar_sidecar_skip_webhook | passed |
| sidecar_artifacts            | passed |
| sidecar_sidecar_namespace    | passed |
| sidecar_sidecar_deployment   | passed |
+------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' + count=0 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/sidecar.xml + '[' 0 -gt 0 ']' + '[' 1 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true + '[' 3 -ne 3 ']' + test_suite_name=streaming + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'.
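
Note: the grep loop traced above is the job's flakiness gate: it counts the reports under $ARTIFACT_DIR that contain a 'failure message' attribute and fails the run only when more than three do, which is why the single failure recorded in examples.xml still ends in exit 0. A minimal sketch of that gate, reconstructed from the trace (the failing branch is never exercised here, so its exact behavior is an assumption):

    count=0
    for file in $ARTIFACT_DIR/*; do
        # Every failed kuttl test case leaves a 'failure message' attribute
        # in the JUnit XML report generated by junitcli.
        if [ "$(grep -c 'failure message' "$file")" -gt 0 ]; then
            count=$((count + 1))
        fi
    done
    # Tolerate a few flaky suites; more than three failing reports fail the job.
    if [ "$count" -gt 3 ]; then
        exit 1    # assumed; the trace only shows the passing path
    fi
    exit 0
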
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/streaming.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-streaming make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=0.32.0 \ SKIP_KAFKA=false \ SKIP_ES_EXTERNAL=true \ ./tests/e2e/streaming/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 53m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 53m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/streaming/render.sh ++ export SUITE_DIR=./tests/e2e/streaming ++ SUITE_DIR=./tests/e2e/streaming ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
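
Note: both render.sh traces decide KAFKA_USE_CUSTOM_PODSET with the version_le helper, which sorts its two arguments with sort -V and succeeds when the first one sorts lowest, i.e. when $1 <= $2. Reconstructed from the trace (the surrounding if/else is inferred from the traced outcome):

    # True (exit 0) when version $1 <= version $2, per GNU sort -V ordering.
    version_le() {
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }

    # In the trace, 0.25.0 sorts first, so `test 0.25.0 == 0.32.0` fails and
    # the script ends up with KAFKA_USE_CUSTOM_PODSET=true.
    if version_le "$KAFKA_VERSION" 0.25.0; then
        KAFKA_USE_CUSTOM_PODSET=false
    else
        KAFKA_USE_CUSTOM_PODSET=true
    fi
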
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ '[' false = true ']'
+ start_test streaming-simple
+ '[' 1 -ne 1 ']'
+ test_name=streaming-simple
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test streaming-simple'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test streaming-simple\e[0m'
Rendering files for test streaming-simple
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build
+ '[' _build '!=' _build ']'
+ mkdir -p streaming-simple
+ cd streaming-simple
+ render_install_kafka my-cluster 00
+ '[' 2 -ne 2 ']'
+ cluster_name=my-cluster
+ test_step=00
+ CLUSTER_NAME=my-cluster
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml
+ render_assert_kafka false my-cluster 00
+ '[' 3 -ne 3 ']'
+ autoprovisioned=false
+ cluster_name=my-cluster
+ test_step=00
+ '[' false = true ']'
+ '[' false = true ']'
+ '[' false = false ']'
+ replicas=1
+ CLUSTER_NAME=my-cluster
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml
++ expr 00 + 1
+ CLUSTER_NAME=my-cluster
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml
++ expr 00 + 2
+ CLUSTER_NAME=my-cluster
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml
+ render_install_elasticsearch upstream 03
+ '[' 2 -ne 2 ']'
+ deploy_mode=upstream
+ test_step=03
+ '[' upstream = upstream ']'
+ '[' true = true ']'
+ template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template
+ /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml
+ /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml
+ JAEGER_NAME=simple-streaming
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml
+ render_smoke_test simple-streaming true 05
+ '[' 3 -ne 3 ']'
+ jaeger=simple-streaming
+ is_secured=true
+ test_step=05
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268
+ export JAEGER_NAME=simple-streaming
+ JAEGER_NAME=simple-streaming
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' false = true ']'
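The smoke-test render above is driven entirely by environment variables consumed by a shared gomplate template. A sketch of the helper as it appears from the trace ($GOMPLATE and $ROOT_DIR are stand-in names, and the non-OpenShift defaults are an assumption, since this run only exercises the secured branch):

# Reconstruction of render_smoke_test as suggested by the trace; the
# real helper in the jaeger-tests repo may differ.
render_smoke_test() {
  local jaeger=$1 is_secured=$2 test_step=$3
  # assumed defaults for the non-secured branch (not shown in this run)
  local protocol=http:// query_port=:16686
  local template=$ROOT_DIR/tests/templates/smoke-test.yaml.template
  if [ "$is_secured" = true ]; then
    # on OpenShift the query service sits behind an OAuth proxy on 443
    protocol=https://
    query_port=:443
    template=$ROOT_DIR/tests/templates/openshift/smoke-test.yaml.template
  fi
  export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
  export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
  export JAEGER_NAME=$jaeger
  $GOMPLATE -f "$template" -o "./${test_step}-smoke-test.yaml"
  $GOMPLATE -f "$ROOT_DIR/tests/templates/smoke-test-assert.yaml.template" -o "./${test_step}-assert.yaml"
  unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
}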
+ start_test streaming-with-tls
+ '[' 1 -ne 1 ']'
+ test_name=streaming-with-tls
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test streaming-with-tls'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m'
Rendering files for test streaming-with-tls
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple
+ '[' streaming-simple '!=' _build ']'
+ cd ..
+ mkdir -p streaming-with-tls
+ cd streaming-with-tls
+ render_install_kafka my-cluster 00
+ '[' 2 -ne 2 ']'
+ cluster_name=my-cluster
+ test_step=00
+ CLUSTER_NAME=my-cluster
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml
+ render_assert_kafka false my-cluster 00
+ '[' 3 -ne 3 ']'
+ autoprovisioned=false
+ cluster_name=my-cluster
+ test_step=00
+ '[' false = true ']'
+ '[' false = true ']'
+ '[' false = false ']'
+ replicas=1
+ CLUSTER_NAME=my-cluster
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml
++ expr 00 + 1
+ CLUSTER_NAME=my-cluster
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml
++ expr 00 + 2
+ CLUSTER_NAME=my-cluster
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml
+ render_install_elasticsearch upstream 03
+ '[' 2 -ne 2 ']'
+ deploy_mode=upstream
+ test_step=03
+ '[' upstream = upstream ']'
+ '[' true = true ']'
+ template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template
+ /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml
+ /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml
+ render_smoke_test tls-streaming true 05
+ '[' 3 -ne 3 ']'
+ jaeger=tls-streaming
+ is_secured=true
+ test_step=05
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443
+ JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268
+ export JAEGER_NAME=tls-streaming
+ JAEGER_NAME=tls-streaming
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' false = true ']'
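Note the mixed zero padding in the rendered assert files above (00-assert.yaml, 1-assert.yaml, 02-assert.yaml): the follow-on step numbers come from expr, which drops the leading zero. kuttl still orders the steps because the leading digits of each file name are matched by the (\d+) group and read as an integer, but padded names are easier to scan; a one-line sketch, assuming the $test_step variable seen in the trace:

next_step=$(printf '%02d' "$(expr "$test_step" + 1)")   # 00 -> 01, not 1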
+ start_test streaming-with-autoprovisioning-autoscale
+ '[' 1 -ne 1 ']'
+ test_name=streaming-with-autoprovisioning-autoscale
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test streaming-with-autoprovisioning-autoscale'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m'
Rendering files for test streaming-with-autoprovisioning-autoscale
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls
+ '[' streaming-with-tls '!=' _build ']'
+ cd ..
+ mkdir -p streaming-with-autoprovisioning-autoscale
+ cd streaming-with-autoprovisioning-autoscale
+ '[' true = true ']'
+ rm ./00-install.yaml ./00-assert.yaml
+ render_install_elasticsearch upstream 01
+ '[' 2 -ne 2 ']'
+ deploy_mode=upstream
+ test_step=01
+ '[' upstream = upstream ']'
+ '[' true = true ']'
+ template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template
+ /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml
+ /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml
+ jaeger_name=auto-provisioned
+ /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml
+ render_assert_kafka true auto-provisioned 03
+ '[' 3 -ne 3 ']'
+ autoprovisioned=true
+ cluster_name=auto-provisioned
+ test_step=03
+ '[' true = true ']'
+ is_kafka_minimal_enabled
+ namespaces=(observability openshift-operators openshift-distributed-tracing)
+ for i in "${namespaces[@]}"
++ kubectl get pods -n observability -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=true
+ '[' true == true ']'
+ return 0
+ replicas=1
+ CLUSTER_NAME=auto-provisioned
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml
++ expr 03 + 1
+ CLUSTER_NAME=auto-provisioned
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./4-assert.yaml
++ expr 03 + 2
+ CLUSTER_NAME=auto-provisioned
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml
+ render_install_tracegen auto-provisioned 06
+ '[' 2 -ne 2 ']'
+ jaeger=auto-provisioned
+ step=06
+ replicas=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/tracegen.yaml -o ./06-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.replicas=1 ./06-install.yaml
+ sed -i s~simple-prod~auto-provisioned~gi ./06-install.yaml
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-tracegen.yaml.template -o ./06-assert.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running streaming E2E tests'
Running streaming E2E tests
+ cd tests/e2e/streaming/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-1891468343
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 420 seconds for each step
    harness.go:372: testsuite: . has 4 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN   kuttl/harness/streaming-simple
=== PAUSE kuttl/harness/streaming-simple
=== RUN   kuttl/harness/streaming-with-autoprovisioning-autoscale
=== PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale
=== RUN   kuttl/harness/streaming-with-tls
=== PAUSE kuttl/harness/streaming-with-tls
=== CONT  kuttl/harness/artifacts
logger.go:42: 09:08:00 | artifacts | Creating namespace: kuttl-test-helping-possum
logger.go:42: 09:08:00 | artifacts | artifacts events from ns kuttl-test-helping-possum:
logger.go:42: 09:08:00 | artifacts | Deleting namespace: kuttl-test-helping-possum
=== CONT  kuttl/harness/streaming-with-autoprovisioning-autoscale
logger.go:42: 09:08:05 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:08:05 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:08:05 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:08:05 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-hip-dory
logger.go:42: 09:08:05 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install
logger.go:42: 09:08:05 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true]
logger.go:42: 09:08:06 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created
logger.go:42: 09:08:06 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true]
logger.go:42: 09:08:06 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch"
logger.go:42: 09:08:06 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6]
logger.go:42: 09:08:12 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE]
logger.go:42: 09:08:12 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created
logger.go:42: 09:08:12 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3]
logger.go:42: 09:08:15 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE]
logger.go:42: 09:08:16 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created
logger.go:42: 09:08:33 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 1-install
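Worth flagging in the render trace above: both yq edits to ./02-install.yaml target .spec.ingester.resources.requests.memory, so the second value ("500m") simply overwrites the first ("20Mi"), leaving a CPU-style quantity as the memory request and no CPU request at all. That matches what surfaces later in this run: the rendered Deployment carries requests.memory: 500m, and the HorizontalPodAutoscaler repeatedly logs "missing request for cpu". Presumably (an assumption, not confirmed by this log) the intent was one request per resource:

yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml
yq e -i '.spec.ingester.resources.requests.cpu="500m"' ./02-install.yaml   # hypothetical corrected line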
logger.go:42: 09:08:33 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install
logger.go:42: 09:08:33 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-hip-dory/auto-provisioned created
logger.go:42: 09:08:33 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install
logger.go:42: 09:08:33 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3-
logger.go:42: 09:09:06 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3-
logger.go:42: 09:09:06 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4-
logger.go:42: 09:09:35 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4-
logger.go:42: 09:09:35 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5-
logger.go:42: 09:09:57 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5-
logger.go:42: 09:09:57 | streaming-with-autoprovisioning-autoscale/6-install | starting test step 6-install
logger.go:42: 09:09:57 | streaming-with-autoprovisioning-autoscale/6-install | Deployment:kuttl-test-hip-dory/tracegen created
logger.go:42: 09:10:07 | streaming-with-autoprovisioning-autoscale/6-install | test step completed 6-install
logger.go:42: 09:10:07 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7-
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale/7- | test step failed 7-
case.go:364: failed in step 7-
case.go:366: --- Deployment:kuttl-test-hip-dory/auto-provisioned-ingester
+++ Deployment:kuttl-test-hip-dory/auto-provisioned-ingester
@@ -1,8 +1,320 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
+  labels:
+    app: jaeger
+    app.kubernetes.io/component: ingester
+    app.kubernetes.io/instance: auto-provisioned
+    app.kubernetes.io/managed-by: jaeger-operator
+    app.kubernetes.io/name: auto-provisioned-ingester
+    app.kubernetes.io/part-of: jaeger
+  managedFields:
+  - apiVersion: apps/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:labels:
+          .: {}
+          f:app: {}
+          f:app.kubernetes.io/component: {}
+          f:app.kubernetes.io/instance: {}
+          f:app.kubernetes.io/managed-by: {}
+          f:app.kubernetes.io/name: {}
+          f:app.kubernetes.io/part-of: {}
+        f:ownerReferences:
+          .: {}
+          k:{"uid":"16d401d0-78f7-43c6-bb65-9bfba0ae1ac4"}: {}
+      f:spec:
+        f:progressDeadlineSeconds: {}
+        f:replicas: {}
+        f:revisionHistoryLimit: {}
+        f:selector: {}
+        f:strategy:
+          f:type: {}
+        f:template:
+          f:metadata:
+            f:annotations:
+              .: {}
+              f:linkerd.io/inject: {}
+              f:prometheus.io/port: {}
+              f:prometheus.io/scrape: {}
+              f:sidecar.istio.io/inject: {}
+            f:labels:
+              .: {}
+              f:app: {}
+              f:app.kubernetes.io/component: {}
+              f:app.kubernetes.io/instance: {}
+              f:app.kubernetes.io/managed-by: {}
+              f:app.kubernetes.io/name: {}
+              f:app.kubernetes.io/part-of: {}
+          f:spec:
+            f:containers:
+              k:{"name":"jaeger-ingester"}:
+                .: {}
+                f:args: {}
+                f:env:
+                  .: {}
+                  k:{"name":"SPAN_STORAGE_TYPE"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:livenessProbe:
+                  .: {}
+                  f:failureThreshold: {}
+                  f:httpGet:
+                    .: {}
+                    f:path: {}
+                    f:port: {}
+                    f:scheme: {}
+                  f:initialDelaySeconds: {}
+                  f:periodSeconds: {}
+                  f:successThreshold: {}
+                  f:timeoutSeconds: {}
+                f:name: {}
+                f:ports:
+                  .: {}
+                  k:{"containerPort":14270,"protocol":"TCP"}:
+                    .: {}
+                    f:containerPort: {}
+                    f:name: {}
+                    f:protocol: {}
+                f:readinessProbe:
+                  .: {}
+                  f:failureThreshold: {}
+                  f:httpGet:
+                    .: {}
+                    f:path: {}
+                    f:port: {}
+                    f:scheme: {}
+                  f:initialDelaySeconds: {}
+                  f:periodSeconds: {}
+                  f:successThreshold: {}
+                  f:timeoutSeconds: {}
+                f:resources:
+                  .: {}
+                  f:requests:
+                    .: {}
+                    f:memory: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+                    f:readOnly: {}
+                  k:{"mountPath":"/var/run/secrets/auto-provisioned"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+                  k:{"mountPath":"/var/run/secrets/auto-provisioned-cluster-ca"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:enableServiceLinks: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:serviceAccount: {}
+            f:serviceAccountName: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"auto-provisioned-trusted-ca"}:
+                .: {}
+                f:configMap:
+                  .: {}
+                  f:defaultMode: {}
+                  f:items: {}
+                  f:name: {}
+                f:name: {}
+              k:{"name":"kafkauser-auto-provisioned"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+              k:{"name":"kafkauser-auto-provisioned-cluster-ca"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: Go-http-client
+    operation: Update
+    time: "2023-11-06T09:09:59Z"
+  - apiVersion: apps/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:deployment.kubernetes.io/revision: {}
+      f:status:
+        f:availableReplicas: {}
+        f:conditions:
+          .: {}
+          k:{"type":"Available"}:
+            .: {}
+            f:lastTransitionTime: {}
+            f:lastUpdateTime: {}
+            f:message: {}
+            f:reason: {}
+            f:status: {}
+            f:type: {}
+          k:{"type":"Progressing"}:
+            .: {}
+            f:lastTransitionTime: {}
+            f:lastUpdateTime: {}
+            f:message: {}
+            f:reason: {}
+            f:status: {}
+            f:type: {}
+        f:observedGeneration: {}
+        f:readyReplicas: {}
+        f:replicas: {}
+        f:updatedReplicas: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-11-06T09:10:01Z"
   name: auto-provisioned-ingester
   namespace: kuttl-test-hip-dory
+  ownerReferences:
+  - apiVersion: jaegertracing.io/v1
+    controller: true
+    kind: Jaeger
+    name: auto-provisioned
+    uid: 16d401d0-78f7-43c6-bb65-9bfba0ae1ac4
+spec:
+  progressDeadlineSeconds: 600
+  replicas: 1
+  revisionHistoryLimit: 10
+  selector:
+    matchLabels:
+      app: jaeger
+      app.kubernetes.io/component: ingester
+      app.kubernetes.io/instance: auto-provisioned
+      app.kubernetes.io/managed-by: jaeger-operator
+      app.kubernetes.io/name: auto-provisioned-ingester
+      app.kubernetes.io/part-of: jaeger
+  strategy:
+    type: Recreate
+  template:
+    metadata:
+      annotations:
+        linkerd.io/inject: disabled
+        prometheus.io/port: "14270"
+        prometheus.io/scrape: "true"
+        sidecar.istio.io/inject: "false"
+      creationTimestamp: null
+      labels:
+        app: jaeger
+        app.kubernetes.io/component: ingester
+        app.kubernetes.io/instance: auto-provisioned
+        app.kubernetes.io/managed-by: jaeger-operator
+        app.kubernetes.io/name: auto-provisioned-ingester
+        app.kubernetes.io/part-of: jaeger
+    spec:
+      containers:
+      - args:
+        - --es.server-urls=http://elasticsearch:9200
+        - --kafka.consumer.authentication=tls
+        - --kafka.consumer.brokers=auto-provisioned-kafka-bootstrap.kuttl-test-hip-dory.svc.cluster.local:9093
+        - --kafka.consumer.tls.ca=/var/run/secrets/auto-provisioned-cluster-ca/ca.crt
+        - --kafka.consumer.tls.cert=/var/run/secrets/auto-provisioned/user.crt
+        - --kafka.consumer.tls.enabled=true
+        - --kafka.consumer.tls.key=/var/run/secrets/auto-provisioned/user.key
+        env:
+        - name: SPAN_STORAGE_TYPE
+          value: elasticsearch
+        image: registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039
+        imagePullPolicy: IfNotPresent
+        livenessProbe:
+          failureThreshold: 5
+          httpGet:
+            path: /
+            port: 14270
+            scheme: HTTP
+          initialDelaySeconds: 5
+          periodSeconds: 15
+          successThreshold: 1
+          timeoutSeconds: 1
+        name: jaeger-ingester
+        ports:
+        - containerPort: 14270
+          name: admin-http
+          protocol: TCP
+        readinessProbe:
+          failureThreshold: 3
+          httpGet:
+            path: /
+            port: 14270
+            scheme: HTTP
+          initialDelaySeconds: 1
+          periodSeconds: 10
+          successThreshold: 1
+          timeoutSeconds: 1
+        resources:
+          requests:
+            memory: 500m
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/auto-provisioned
+          name: kafkauser-auto-provisioned
+        - mountPath: /var/run/secrets/auto-provisioned-cluster-ca
+          name: kafkauser-auto-provisioned-cluster-ca
+        - mountPath: /etc/pki/ca-trust/extracted/pem
+          name: auto-provisioned-trusted-ca
+          readOnly: true
+      dnsPolicy: ClusterFirst
+      enableServiceLinks: false
+      restartPolicy: Always
+      schedulerName: default-scheduler
+      securityContext: {}
+      serviceAccount: auto-provisioned
+      serviceAccountName: auto-provisioned
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: kafkauser-auto-provisioned
+        secret:
+          defaultMode: 420
+          secretName: auto-provisioned
+      - name: kafkauser-auto-provisioned-cluster-ca
+        secret:
+          defaultMode: 420
+          secretName: auto-provisioned-cluster-ca-cert
+      - configMap:
+          defaultMode: 420
+          items:
+          - key: ca-bundle.crt
+            path: tls-ca-bundle.pem
+          name: auto-provisioned-trusted-ca
+        name: auto-provisioned-trusted-ca
 status:
-  readyReplicas: 2
+  availableReplicas: 1
+  conditions:
+  - lastTransitionTime: "2023-11-06T09:10:01Z"
+    lastUpdateTime: "2023-11-06T09:10:01Z"
+    message: Deployment has minimum availability.
+    reason: MinimumReplicasAvailable
+    status: "True"
+    type: Available
+  - lastTransitionTime: "2023-11-06T09:09:59Z"
+    lastUpdateTime: "2023-11-06T09:10:01Z"
+    message: ReplicaSet "auto-provisioned-ingester-7d68fcf549" has successfully progressed.
+    reason: NewReplicaSetAvailable
+    status: "True"
+    type: Progressing
+  observedGeneration: 1
+  readyReplicas: 1
+  replicas: 1
+  updatedReplicas: 1
case.go:366: resource Deployment:kuttl-test-hip-dory/auto-provisioned-ingester: .status.readyReplicas: value mismatch, expected: 2 != actual: 1
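Steps 3- through 7- are assert-only steps (they carry no install files, hence the bare names). Step 7 waits for the ingester Deployment to reach two ready replicas, i.e. for the HPA to scale it from minReplicas=1 to maxReplicas=2 under tracegen load; with the broken resource requests noted earlier, utilization can never be computed and the step times out at the harness's 420 s per-step limit (09:10:07 to 09:17:07). The expected state is roughly this assert, written here as a shell heredoc (a reconstruction; the actual step-7 assert file in the repo may pin more fields):

cat > 07-assert.yaml <<'EOF'
apiVersion: apps/v1
kind: Deployment
metadata:
  name: auto-provisioned-ingester
status:
  readyReplicas: 2
EOF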
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-hip-dory:
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:12 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-hip-dory/elasticsearch-0 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:12 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:13 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.129.2.78/23] from ovn-kubernetes
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:13 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:22 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 9.036s (9.036s including waiting) kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:22 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:22 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:28 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.129.2.78:9200/": dial tcp 10.129.2.78:9200: connect: connection refused kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:37 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:37 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:37 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-hip-dory/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:37 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:41 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-hip-dory/auto-provisioned-zookeeper-0 to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:41 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-d257d700-6217-4cf1-b240-3f8fe00ec510 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:44 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d257d700-6217-4cf1-b240-3f8fe00ec510" attachdetach-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:46 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.128.2.104/23] from ovn-kubernetes
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:46 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:46 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:08:46 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:08 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:08 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:08 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:08 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-hip-dory/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:11 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-fd07dad7-6eae-4feb-8179-e2978358d946 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7dbf65d878-d4gkx_b1fdd783-ac60-4eb6-adc0-c462cfce464e
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:12 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-hip-dory/auto-provisioned-kafka-0 to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:14 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-fd07dad7-6eae-4feb-8179-e2978358d946" attachdetach-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:15 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.131.0.76/23] from ovn-kubernetes
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:15 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:15 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:15 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:36 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg Binding Scheduled Successfully assigned kuttl-test-hip-dory/auto-provisioned-entity-operator-6dffc6469f-wcvwg to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:36 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-6dffc6469f SuccessfulCreate Created pod: auto-provisioned-entity-operator-6dffc6469f-wcvwg replicaset-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:36 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-6dffc6469f to 1 deployment-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg AddedInterface Add eth0 [10.129.2.79/23] from ovn-kubernetes
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:37 +0000 UTC Normal Pod auto-provisioned-entity-operator-6dffc6469f-wcvwg.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:57 +0000 UTC Normal Pod tracegen-647d5488c8-vmm9c Binding Scheduled Successfully assigned kuttl-test-hip-dory/tracegen-647d5488c8-vmm9c to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:57 +0000 UTC Warning Pod tracegen-647d5488c8-vmm9c FailedMount MountVolume.SetUp failed for volume "auto-provisioned-trusted-ca" : configmap "auto-provisioned-trusted-ca" not found kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:57 +0000 UTC Warning Pod tracegen-647d5488c8-vmm9c FailedMount MountVolume.SetUp failed for volume "auto-provisioned-service-ca" : configmap "auto-provisioned-service-ca" not found kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:57 +0000 UTC Normal ReplicaSet.apps tracegen-647d5488c8 SuccessfulCreate Created pod: tracegen-647d5488c8-vmm9c replicaset-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:57 +0000 UTC Normal Deployment.apps tracegen ScalingReplicaSet Scaled up replica set tracegen-647d5488c8 to 1 deployment-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:59 +0000 UTC Normal Pod auto-provisioned-collector-6dd8b9d95b-wxwbd Binding Scheduled Successfully assigned kuttl-test-hip-dory/auto-provisioned-collector-6dd8b9d95b-wxwbd to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:59 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-6dd8b9d95b SuccessfulCreate Created pod: auto-provisioned-collector-6dd8b9d95b-wxwbd replicaset-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:59 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-6dd8b9d95b to 1 deployment-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:59 +0000 UTC Normal Pod auto-provisioned-ingester-7d68fcf549-5vsbg Binding Scheduled Successfully assigned kuttl-test-hip-dory/auto-provisioned-ingester-7d68fcf549-5vsbg to ip-10-0-52-106.ec2.internal default-scheduler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:59 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-7d68fcf549 SuccessfulCreate Created pod: auto-provisioned-ingester-7d68fcf549-5vsbg replicaset-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:59 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-7d68fcf549 to 1 deployment-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:59 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t Binding Scheduled Successfully assigned kuttl-test-hip-dory/auto-provisioned-query-6f4c8548d7-mz46t to ip-10-0-91-180.ec2.internal default-scheduler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:59 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-6f4c8548d7 SuccessfulCreate Created pod: auto-provisioned-query-6f4c8548d7-mz46t replicaset-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:09:59 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-6f4c8548d7 to 1 deployment-controller
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Warning Pod auto-provisioned-collector-6dd8b9d95b-wxwbd FailedMount MountVolume.SetUp failed for volume "auto-provisioned-collector-tls-config-volume" : secret "auto-provisioned-collector-headless-tls" not found kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Normal Pod auto-provisioned-collector-6dd8b9d95b-wxwbd AddedInterface Add eth0 [10.131.0.77/23] from ovn-kubernetes
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Normal Pod auto-provisioned-collector-6dd8b9d95b-wxwbd.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Normal Pod auto-provisioned-ingester-7d68fcf549-5vsbg AddedInterface Add eth0 [10.128.2.106/23] from ovn-kubernetes
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Normal Pod auto-provisioned-ingester-7d68fcf549-5vsbg.spec.containers{jaeger-ingester} Pulled Container image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Normal Pod auto-provisioned-ingester-7d68fcf549-5vsbg.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Normal Pod auto-provisioned-ingester-7d68fcf549-5vsbg.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Warning Pod auto-provisioned-query-6f4c8548d7-mz46t FailedMount MountVolume.SetUp failed for volume "auto-provisioned-ui-oauth-proxy-tls" : secret "auto-provisioned-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t AddedInterface Add eth0 [10.131.0.78/23] from ovn-kubernetes
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:00 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-collector-6dd8b9d95b-wxwbd.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-collector-6dd8b9d95b-wxwbd.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod auto-provisioned-query-6f4c8548d7-mz46t.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod tracegen-647d5488c8-vmm9c AddedInterface Add eth0 [10.128.2.105/23] from ovn-kubernetes
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:01 +0000 UTC Normal Pod tracegen-647d5488c8-vmm9c.spec.containers{tracegen} Pulling Pulling image "jaegertracing/jaeger-tracegen:1.49.0" kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:05 +0000 UTC Normal Pod tracegen-647d5488c8-vmm9c.spec.containers{tracegen} Pulled Successfully pulled image "jaegertracing/jaeger-tracegen:1.49.0" in 3.942s (3.942s including waiting) kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:05 +0000 UTC Normal Pod tracegen-647d5488c8-vmm9c.spec.containers{tracegen} Created Created container tracegen kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:05 +0000 UTC Normal Pod tracegen-647d5488c8-vmm9c.spec.containers{tracegen} Started Started container tracegen kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:05 +0000 UTC Normal Pod tracegen-647d5488c8-vmm9c.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:05 +0000 UTC Normal Pod tracegen-647d5488c8-vmm9c.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:05 +0000 UTC Normal Pod tracegen-647d5488c8-vmm9c.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:15 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:15 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:15 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:15 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:15 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:15 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:45 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:10:45 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:11:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod auto-provisioned-collector-6dd8b9d95b-wxwbd horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:11:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:11:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (1 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:15:15 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod auto-provisioned-collector-6dd8b9d95b-wxwbd horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | 2023-11-06 09:15:15 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-ingester of Pod auto-provisioned-ingester-7d68fcf549-5vsbg horizontal-pod-autoscaler
logger.go:42: 09:17:07 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-hip-dory
=== CONT  kuttl/harness/streaming-with-tls
logger.go:42: 09:17:24 | streaming-with-tls | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:17:24 | streaming-with-tls | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:17:24 | streaming-with-tls | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:17:24 | streaming-with-tls | Creating namespace: kuttl-test-witty-finch
logger.go:42: 09:17:24 | streaming-with-tls/0-install | starting test step 0-install
logger.go:42: 09:17:24 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true]
logger.go:42: 09:17:24 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 09:17:25 | streaming-with-tls/0-install | >>>> Skipping kafka-operator undeploy
logger.go:42: 09:17:25 | streaming-with-tls/0-install | kubectl delete --namespace kuttl-test-witty-finch -f tests/_build/kafka-example.yaml 2>&1 || true
logger.go:42: 09:17:25 | streaming-with-tls/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist
logger.go:42: 09:17:25 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 09:17:25 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0]
logger.go:42: 09:17:25 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 09:17:25 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-witty-finch
logger.go:42: 09:17:25 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-witty-finch 2>&1 | grep -v "already exists" || true
logger.go:42: 09:17:25 | streaming-with-tls/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub
logger.go:42: 09:17:25 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-witty-finch
logger.go:42: 09:17:25 | streaming-with-tls/0-install | mkdir -p tests/_build/
logger.go:42: 09:17:25 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-witty-finch 2>&1 | grep -v "already exists" || true
logger.go:42: 09:17:25 | streaming-with-tls/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs
logger.go:42: 09:17:25 | streaming-with-tls/0-install |   % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
logger.go:42: 09:17:25 | streaming-with-tls/0-install |                                  Dload  Upload   Total   Spent    Left  Speed
logger.go:42: 09:17:25 | streaming-with-tls/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 6321 0 --:--:-- --:--:-- --:--:-- 6313 100 865 100 865 0 0 6319 0 --:--:-- --:--:-- --:--:-- 6313
logger.go:42: 09:17:25 | streaming-with-tls/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml
logger.go:42: 09:17:25 | streaming-with-tls/0-install | kubectl -n kuttl-test-witty-finch apply --dry-run=client -f tests/_build/kafka-example.yaml
logger.go:42: 09:17:25 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run)
logger.go:42: 09:17:25 | streaming-with-tls/0-install | kubectl -n kuttl-test-witty-finch apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true
logger.go:42: 09:17:25 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created
logger.go:42: 09:17:25 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 09:24:26 | streaming-with-tls/0-install | test step failed 0-install
case.go:364: failed in step 0-install
case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found
logger.go:42: 09:24:26 | streaming-with-tls | streaming-with-tls events from ns kuttl-test-witty-finch:
logger.go:42: 09:24:26 | streaming-with-tls | Deleting namespace: kuttl-test-witty-finch
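The failure mode here differs from the autoscale test: the 0-install step applies the downloaded Strimzi kafka-persistent-single example and then, because KAFKA_USE_CUSTOM_PODSET=true, the rendered assert appears to wait for a StrimziPodSet for the ZooKeeper nodes, which never showed up before the step timeout; kuttl then reports the resource as not found. A quick manual check for the same condition (standard kubectl, shown purely for illustration; $NAMESPACE is the kuttl test namespace):

kubectl get strimzipodsets.core.strimzi.io my-cluster-zookeeper -n "$NAMESPACE" -o yaml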
| "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 09:17:25 | streaming-with-tls/0-install | kubectl -n kuttl-test-witty-finch apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 09:17:25 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 09:17:25 | streaming-with-tls/0-install | kubectl -n kuttl-test-witty-finch apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 09:17:25 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 09:17:25 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 09:24:26 | streaming-with-tls/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 09:24:26 | streaming-with-tls | streaming-with-tls events from ns kuttl-test-witty-finch: logger.go:42: 09:24:26 | streaming-with-tls | Deleting namespace: kuttl-test-witty-finch === CONT kuttl/harness/streaming-simple logger.go:42: 09:24:31 | streaming-simple | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:24:31 | streaming-simple | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:24:31 | streaming-simple | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:24:31 | streaming-simple | Creating namespace: kuttl-test-thankful-ox logger.go:42: 09:24:31 | streaming-simple/0-install | starting test step 0-install logger.go:42: 09:24:31 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 09:24:31 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 09:24:31 | streaming-simple/0-install | >>>> Skiping kafka-operator undeploy logger.go:42: 09:24:31 | streaming-simple/0-install | kubectl delete --namespace kuttl-test-thankful-ox -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 09:24:31 | streaming-simple/0-install | Error from server (NotFound): error when deleting "tests/_build/kafka-example.yaml": kafkas.kafka.strimzi.io "my-cluster" not found logger.go:42: 09:24:31 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 09:24:31 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 09:24:31 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 09:24:31 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-thankful-ox logger.go:42: 09:24:31 | streaming-simple/0-install | kubectl create namespace kuttl-test-thankful-ox 2>&1 | grep -v "already exists" || true logger.go:42: 09:24:32 | streaming-simple/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 09:24:32 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-thankful-ox logger.go:42: 09:24:32 | streaming-simple/0-install | mkdir -p tests/_build/ logger.go:42: 09:24:32 | streaming-simple/0-install | kubectl create namespace kuttl-test-thankful-ox 2>&1 | grep -v "already exists" || true logger.go:42: 09:24:32 | streaming-simple/0-install | 
=== CONT  kuttl/harness/streaming-simple
logger.go:42: 09:24:31 | streaming-simple | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:24:31 | streaming-simple | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:24:31 | streaming-simple | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 09:24:31 | streaming-simple | Creating namespace: kuttl-test-thankful-ox
logger.go:42: 09:24:31 | streaming-simple/0-install | starting test step 0-install
logger.go:42: 09:24:31 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true]
logger.go:42: 09:24:31 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 09:24:31 | streaming-simple/0-install | >>>> Skipping kafka-operator undeploy
logger.go:42: 09:24:31 | streaming-simple/0-install | kubectl delete --namespace kuttl-test-thankful-ox -f tests/_build/kafka-example.yaml 2>&1 || true
logger.go:42: 09:24:31 | streaming-simple/0-install | Error from server (NotFound): error when deleting "tests/_build/kafka-example.yaml": kafkas.kafka.strimzi.io "my-cluster" not found
logger.go:42: 09:24:31 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 09:24:31 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0]
logger.go:42: 09:24:31 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 09:24:31 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-thankful-ox
logger.go:42: 09:24:31 | streaming-simple/0-install | kubectl create namespace kuttl-test-thankful-ox 2>&1 | grep -v "already exists" || true
logger.go:42: 09:24:32 | streaming-simple/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub
logger.go:42: 09:24:32 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-thankful-ox
logger.go:42: 09:24:32 | streaming-simple/0-install | mkdir -p tests/_build/
logger.go:42: 09:24:32 | streaming-simple/0-install | kubectl create namespace kuttl-test-thankful-ox 2>&1 | grep -v "already exists" || true
logger.go:42: 09:24:32 | streaming-simple/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs
logger.go:42: 09:24:32 | streaming-simple/0-install | % Total % Received % Xferd Average Speed Time Time Time Current
logger.go:42: 09:24:32 | streaming-simple/0-install | Dload Upload Total Spent Left Speed
logger.go:42: 09:24:32 | streaming-simple/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 8098 0 --:--:-- --:--:-- --:--:-- 8160
logger.go:42: 09:24:32 | streaming-simple/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml
logger.go:42: 09:24:32 | streaming-simple/0-install | kubectl -n kuttl-test-thankful-ox apply --dry-run=client -f tests/_build/kafka-example.yaml
logger.go:42: 09:24:32 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run)
logger.go:42: 09:24:32 | streaming-simple/0-install | kubectl -n kuttl-test-thankful-ox apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true
logger.go:42: 09:24:32 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created
logger.go:42: 09:24:32 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 09:31:32 | streaming-simple/0-install | test step failed 0-install
case.go:364: failed in step 0-install
case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found
logger.go:42: 09:31:32 | streaming-simple | streaming-simple events from ns kuttl-test-thankful-ox:
logger.go:42: 09:31:32 | streaming-simple | Deleting namespace: kuttl-test-thankful-ox
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1418.66s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.74s)
        --- FAIL: kuttl/harness/streaming-with-autoprovisioning-autoscale (559.20s)
        --- FAIL: kuttl/harness/streaming-with-tls (426.89s)
        --- FAIL: kuttl/harness/streaming-simple (426.78s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml
time="2023-11-06T09:31:38Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-06T09:31:38Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-06T09:31:38Z" level=debug msg="normalizing test case names"
time="2023-11-06T09:31:38Z" level=debug msg="streaming/artifacts -> streaming_artifacts"
time="2023-11-06T09:31:38Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale"
time="2023-11-06T09:31:38Z" level=debug msg="streaming/streaming-with-tls -> streaming_streaming_with_tls"
time="2023-11-06T09:31:38Z" level=debug msg="streaming/streaming-simple -> streaming_streaming_simple"
+-----------------------------------------------------+--------+
|                        NAME                         | RESULT |
+-----------------------------------------------------+--------+
| streaming_artifacts                                 | passed |
| streaming_streaming_with_autoprovisioning_autoscale | failed |
| streaming_streaming_with_tls                        | failed |
| streaming_streaming_simple                          | failed |
+-----------------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+
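# The trace that starts above and continues below implements the run's
# suite-level failure gate: every junit report in $ARTIFACT_DIR is grepped for
# kuttl failure messages, each file containing at least one failure bumps a
# counter, and the job only exits non-zero when more than three suites are red.
# A condensed sketch of that logic (threshold and paths as observed here):
#
#   count=0
#   for file in "$ARTIFACT_DIR"/*; do
#     [ "$(grep -c 'failure message' "$file")" -gt 0 ] && count=$((count + 1))
#   done
#   [ "$count" -gt 3 ] && exit 1   # this run reaches count=2, so it exits 0
#   exit 0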
for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/examples.xml + '[' 1 -gt 0 ']' + count=1 + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/generate.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/sidecar.xml + '[' 0 -gt 0 ']' + for file in $ARTIFACT_DIR/* ++ grep -c 'failure message' /logs/artifacts/streaming.xml + '[' 3 -gt 0 ']' + count=2 + '[' 2 -gt 3 ']' + exit 0 make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true + '[' 3 -ne 3 ']' + test_suite_name=ui + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/ui.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-ui make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 77m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-11-04-120954 True False 77m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/ui/render.sh ++ export SUITE_DIR=./tests/e2e/ui ++ SUITE_DIR=./tests/e2e/ui ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/ui ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test allinone + '[' 1 -ne 1 ']' + test_name=allinone + echo =========================================================================== =========================================================================== + info 'Rendering files for test allinone' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test allinone\e[0m' Rendering files for test allinone + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build + '[' _build '!=' _build ']' + mkdir -p allinone + cd allinone + export GET_URL_COMMAND + export URL + export JAEGER_NAME=all-in-one-ui + JAEGER_NAME=all-in-one-ui + '[' true = true ']' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml + start_test production + '[' 1 -ne 1 ']' + test_name=production + echo =========================================================================== =========================================================================== + info 'Rendering files for test production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test production\e[0m' Rendering files for 
test production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone + '[' allinone '!=' _build ']' + cd .. + mkdir -p production + cd production + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + [[ true = true ]] + [[ true = true ]] + render_install_jaeger production-ui production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + '[' true = true ']' + INSECURE=true + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml + INSECURE=true + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml + ASSERT_PRESENT=false + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running ui E2E tests' Running ui E2E tests + cd tests/e2e/ui/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1891468343 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 420 seconds for each step harness.go:372: testsuite: . 
has 3 tests === RUN kuttl/harness === RUN kuttl/harness/allinone === PAUSE kuttl/harness/allinone === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/production === PAUSE kuttl/harness/production === CONT kuttl/harness/allinone logger.go:42: 09:31:45 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:31:45 | allinone | Creating namespace: kuttl-test-guided-basilisk logger.go:42: 09:31:45 | allinone/0-install | starting test step 0-install logger.go:42: 09:31:45 | allinone/0-install | Jaeger:kuttl-test-guided-basilisk/all-in-one-ui created logger.go:42: 09:31:49 | allinone/0-install | test step completed 0-install logger.go:42: 09:31:49 | allinone/1-curl | starting test step 1-curl logger.go:42: 09:31:49 | allinone/1-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 09:31:49 | allinone/1-curl | Checking the Ingress host value was populated logger.go:42: 09:31:49 | allinone/1-curl | Try number 0 logger.go:42: 09:31:49 | allinone/1-curl | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 09:31:49 | allinone/1-curl | template was: logger.go:42: 09:31:49 | allinone/1-curl | {.items[0].status.ingress[0].host} logger.go:42: 09:31:49 | allinone/1-curl | object given to jsonpath engine was: logger.go:42: 09:31:49 | allinone/1-curl | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 09:31:49 | allinone/1-curl | logger.go:42: 09:31:49 | allinone/1-curl | logger.go:42: 09:31:59 | allinone/1-curl | Try number 1 logger.go:42: 09:31:59 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-guided-basilisk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:31:59 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui] logger.go:42: 09:31:59 | allinone/1-curl | Checking an expected HTTP response logger.go:42: 09:31:59 | allinone/1-curl | Running in OpenShift logger.go:42: 09:31:59 | allinone/1-curl | User not provided. Getting the token... logger.go:42: 09:32:01 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 09:32:07 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-guided-basilisk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:32:07 | allinone/1-curl | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 09:32:07 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-guided-basilisk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:32:07 | allinone/1-curl | curl response asserted properly logger.go:42: 09:32:07 | allinone/1-curl | test step completed 1-curl logger.go:42: 09:32:07 | allinone/2-delete | starting test step 2-delete logger.go:42: 09:32:07 | allinone/2-delete | Jaeger:kuttl-test-guided-basilisk/all-in-one-ui created logger.go:42: 09:32:07 | allinone/2-delete | test step completed 2-delete logger.go:42: 09:32:07 | allinone/3-install | starting test step 3-install logger.go:42: 09:32:07 | allinone/3-install | Jaeger:kuttl-test-guided-basilisk/all-in-one-ui updated logger.go:42: 09:32:07 | allinone/3-install | test step completed 3-install logger.go:42: 09:32:07 | allinone/4-test-ui-config | starting test step 4-test-ui-config logger.go:42: 09:32:07 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh] logger.go:42: 09:32:07 | allinone/4-test-ui-config | Checking the Ingress host value was populated logger.go:42: 09:32:07 | allinone/4-test-ui-config | Try number 0 logger.go:42: 09:32:07 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 09:32:07 | allinone/4-test-ui-config | template was: logger.go:42: 09:32:07 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host} logger.go:42: 09:32:07 | allinone/4-test-ui-config | object given to jsonpath engine was: logger.go:42: 09:32:07 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 09:32:07 | allinone/4-test-ui-config | logger.go:42: 09:32:07 | allinone/4-test-ui-config | logger.go:42: 09:32:17 | allinone/4-test-ui-config | Try number 1 logger.go:42: 09:32:17 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-guided-basilisk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:32:17 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 09:32:18 | allinone/4-test-ui-config | time="2023-11-06T09:32:18Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-guided-basilisk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search..." logger.go:42: 09:32:18 | allinone/4-test-ui-config | time="2023-11-06T09:32:18Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 09:32:18 | allinone/4-test-ui-config | time="2023-11-06T09:32:18Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-guided-basilisk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search" logger.go:42: 09:32:18 | allinone/4-test-ui-config | time="2023-11-06T09:32:18Z" level=info msg="Doing request number 0" logger.go:42: 09:32:18 | allinone/4-test-ui-config | time="2023-11-06T09:32:18Z" level=info msg="Content found and asserted!" logger.go:42: 09:32:18 | allinone/4-test-ui-config | time="2023-11-06T09:32:18Z" level=info msg="Success!" 
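The jsonpath error at "Try number 0" above is the expected cold-start case rather than a bug: the operator has not finished creating the OpenShift Route when the step begins, so {.items[0].status.ingress[0].host} indexes an empty list, and ensure-ingress-host.sh simply retries until the host field is populated. A rough sketch of that wait loop, assuming the generated script expands to roughly this shape (retry count and sleep interval are illustrative):

# Hypothetical condensation of ensure-ingress-host.sh for illustration.
echo "Checking the Ingress host value was populated"
for try in $(seq 0 29); do
  echo "Try number $try"
  host=$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE") \
    && [ -n "$host" ] && { echo "Hostname is $host"; exit 0; }
  sleep 10
done
exit 1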
logger.go:42: 09:32:18 | allinone/4-test-ui-config | test step completed 4-test-ui-config logger.go:42: 09:32:18 | allinone | allinone events from ns kuttl-test-guided-basilisk: logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:48 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-6496dffb6f to 1 deployment-controller logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:49 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln Binding Scheduled Successfully assigned kuttl-test-guided-basilisk/all-in-one-ui-6496dffb6f-986ln to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:49 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln AddedInterface Add eth0 [10.129.2.81/23] from ovn-kubernetes logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:49 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:49 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:49 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:49 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:49 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:49 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:31:49 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-6496dffb6f SuccessfulCreate Created pod: all-in-one-ui-6496dffb6f-986ln replicaset-controller logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:02 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:02 +0000 UTC Normal Pod all-in-one-ui-6496dffb6f-986ln.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:02 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-6496dffb6f SuccessfulDelete Deleted pod: all-in-one-ui-6496dffb6f-986ln replicaset-controller logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:02 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-6496dffb6f to 0 from 1 deployment-controller logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:03 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg Binding Scheduled Successfully assigned kuttl-test-guided-basilisk/all-in-one-ui-76c88fd89f-b55kg to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:03 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg AddedInterface Add eth0 [10.128.2.108/23] from ovn-kubernetes logger.go:42: 09:32:18 | allinone 
| 2023-11-06 09:32:03 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:03 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:03 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:03 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:03 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:03 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:03 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-76c88fd89f SuccessfulCreate Created pod: all-in-one-ui-76c88fd89f-b55kg replicaset-controller logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:03 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-76c88fd89f to 1 deployment-controller logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:07 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:07 +0000 UTC Normal Pod all-in-one-ui-76c88fd89f-b55kg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:13 +0000 UTC Normal Pod all-in-one-ui-68b869fbd7-7rtf6 Binding Scheduled Successfully assigned kuttl-test-guided-basilisk/all-in-one-ui-68b869fbd7-7rtf6 to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:13 +0000 UTC Normal Pod all-in-one-ui-68b869fbd7-7rtf6 AddedInterface Add eth0 [10.129.2.82/23] from ovn-kubernetes logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:13 +0000 UTC Normal Pod all-in-one-ui-68b869fbd7-7rtf6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:13 +0000 UTC Normal Pod all-in-one-ui-68b869fbd7-7rtf6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:13 +0000 UTC Normal Pod all-in-one-ui-68b869fbd7-7rtf6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:13 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-68b869fbd7 SuccessfulCreate Created pod: all-in-one-ui-68b869fbd7-7rtf6 replicaset-controller logger.go:42: 09:32:18 | allinone | 2023-11-06 09:32:13 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-68b869fbd7 to 1 deployment-controller logger.go:42: 
09:32:18 | allinone | Deleting namespace: kuttl-test-guided-basilisk === CONT kuttl/harness/production logger.go:42: 09:32:24 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:32:24 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 09:32:24 | production | Creating namespace: kuttl-test-immune-elk logger.go:42: 09:32:24 | production/1-install | starting test step 1-install logger.go:42: 09:32:24 | production/1-install | Jaeger:kuttl-test-immune-elk/production-ui created logger.go:42: 09:33:00 | production/1-install | test step completed 1-install logger.go:42: 09:33:00 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access logger.go:42: 09:33:00 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh] logger.go:42: 09:33:00 | production/2-check-forbbiden-access | Checking the Ingress host value was populated logger.go:42: 09:33:00 | production/2-check-forbbiden-access | Try number 0 logger.go:42: 09:33:00 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:33:00 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui] logger.go:42: 09:33:00 | production/2-check-forbbiden-access | Checking an expected HTTP response logger.go:42: 09:33:00 | production/2-check-forbbiden-access | Running in OpenShift logger.go:42: 09:33:00 | production/2-check-forbbiden-access | Not using any secret logger.go:42: 09:33:00 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:33:00 | production/2-check-forbbiden-access | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 09:33:00 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:33:00 | production/2-check-forbbiden-access | HTTP response is 503. 403 expected. 
Waiting 10 s logger.go:42: 09:33:10 | production/2-check-forbbiden-access | Try number 3/30 the https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:33:10 | production/2-check-forbbiden-access | curl response asserted properly logger.go:42: 09:33:10 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access logger.go:42: 09:33:10 | production/3-curl | starting test step 3-curl logger.go:42: 09:33:10 | production/3-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 09:33:10 | production/3-curl | Checking the Ingress host value was populated logger.go:42: 09:33:10 | production/3-curl | Try number 0 logger.go:42: 09:33:10 | production/3-curl | Hostname is production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:33:10 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 09:33:10 | production/3-curl | Checking an expected HTTP response logger.go:42: 09:33:10 | production/3-curl | Running in OpenShift logger.go:42: 09:33:10 | production/3-curl | User not provided. Getting the token... logger.go:42: 09:33:12 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 09:33:18 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:33:18 | production/3-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 09:33:18 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:33:18 | production/3-curl | HTTP response is 503. 200 expected. 
Waiting 10 s logger.go:42: 09:33:28 | production/3-curl | Try number 3/30 the https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:33:28 | production/3-curl | curl response asserted properly logger.go:42: 09:33:28 | production/3-curl | test step completed 3-curl logger.go:42: 09:33:28 | production/4-install | starting test step 4-install logger.go:42: 09:33:28 | production/4-install | Jaeger:kuttl-test-immune-elk/production-ui updated logger.go:42: 09:33:28 | production/4-install | test step completed 4-install logger.go:42: 09:33:28 | production/5-check-disabled-security | starting test step 5-check-disabled-security logger.go:42: 09:33:28 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh] logger.go:42: 09:33:28 | production/5-check-disabled-security | Checking the Ingress host value was populated logger.go:42: 09:33:28 | production/5-check-disabled-security | Try number 0 logger.go:42: 09:33:28 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:33:28 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 09:33:28 | production/5-check-disabled-security | Checking an expected HTTP response logger.go:42: 09:33:28 | production/5-check-disabled-security | Running in OpenShift logger.go:42: 09:33:28 | production/5-check-disabled-security | Not using any secret logger.go:42: 09:33:28 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:33:28 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 09:33:28 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:33:28 | production/5-check-disabled-security | HTTP response is 403. 200 expected. 
Waiting 10 s logger.go:42: 09:33:38 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 09:33:38 | production/5-check-disabled-security | curl response asserted properly logger.go:42: 09:33:38 | production/5-check-disabled-security | test step completed 5-check-disabled-security logger.go:42: 09:33:38 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID logger.go:42: 09:33:38 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 09:33:38 | production/6-check-NO-gaID | Checking the Ingress host value was populated logger.go:42: 09:33:38 | production/6-check-NO-gaID | Try number 0 logger.go:42: 09:33:38 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:33:38 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 09:33:39 | production/6-check-NO-gaID | time="2023-11-06T09:33:39Z" level=info msg="Querying https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search..." logger.go:42: 09:33:39 | production/6-check-NO-gaID | time="2023-11-06T09:33:39Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 09:33:39 | production/6-check-NO-gaID | time="2023-11-06T09:33:39Z" level=info msg="Polling to https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search" logger.go:42: 09:33:39 | production/6-check-NO-gaID | time="2023-11-06T09:33:39Z" level=info msg="Doing request number 0" logger.go:42: 09:33:39 | production/6-check-NO-gaID | time="2023-11-06T09:33:39Z" level=info msg="Content not found and asserted it was not found!" logger.go:42: 09:33:39 | production/6-check-NO-gaID | time="2023-11-06T09:33:39Z" level=info msg="Success!" 
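The 503-then-retry sequences in steps 2, 3 and 5 above are absorbed by cmd-utils/assert-jaeger-http-code.sh, which allows up to 30 attempts ten seconds apart and drops to insecure mode after a first TLS failure, so a route that is still converging (503) or a security change that has not rolled out yet (403) does not immediately fail the step. A compact sketch of that retry shape (argument handling simplified; the real script also fetches a token when no user is provided):

# Hypothetical condensation of assert-jaeger-http-code.sh.
url="$1"; expected="$2"
for try in $(seq 1 30); do
  code=$(curl -ks -o /dev/null -w '%{http_code}' "$url")
  if [ "$code" = "$expected" ]; then
    echo "curl response asserted properly"
    exit 0
  fi
  echo "HTTP response is $code. $expected expected. Waiting 10 s"
  sleep 10
done
exit 1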
logger.go:42: 09:33:39 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID logger.go:42: 09:33:39 | production/7-add-tracking-id | starting test step 7-add-tracking-id logger.go:42: 09:33:39 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE] logger.go:42: 09:33:39 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured logger.go:42: 09:33:39 | production/7-add-tracking-id | test step completed 7-add-tracking-id logger.go:42: 09:33:39 | production/8-check-gaID | starting test step 8-check-gaID logger.go:42: 09:33:39 | production/8-check-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 09:33:39 | production/8-check-gaID | Checking the Ingress host value was populated logger.go:42: 09:33:39 | production/8-check-gaID | Try number 0 logger.go:42: 09:33:39 | production/8-check-gaID | Hostname is production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com logger.go:42: 09:33:39 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 09:33:39 | production/8-check-gaID | time="2023-11-06T09:33:39Z" level=info msg="Querying https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search..." logger.go:42: 09:33:39 | production/8-check-gaID | time="2023-11-06T09:33:39Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 09:33:39 | production/8-check-gaID | time="2023-11-06T09:33:39Z" level=info msg="Polling to https://production-ui-kuttl-test-immune-elk.apps.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com/search" logger.go:42: 09:33:39 | production/8-check-gaID | time="2023-11-06T09:33:39Z" level=info msg="Doing request number 0" logger.go:42: 09:33:39 | production/8-check-gaID | time="2023-11-06T09:33:39Z" level=warning msg="Found: false . Assert: true" logger.go:42: 09:33:39 | production/8-check-gaID | time="2023-11-06T09:33:39Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 09:33:39 | production/8-check-gaID | time="2023-11-06T09:33:39Z" level=info msg="Doing request number 1" logger.go:42: 09:33:39 | production/8-check-gaID | time="2023-11-06T09:33:39Z" level=warning msg="Found: false . Assert: true" logger.go:42: 09:33:39 | production/8-check-gaID | time="2023-11-06T09:33:39Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 09:33:47 | production/8-check-gaID | time="2023-11-06T09:33:47Z" level=info msg="Doing request number 2" logger.go:42: 09:33:47 | production/8-check-gaID | time="2023-11-06T09:33:47Z" level=info msg="Content found and asserted!" logger.go:42: 09:33:47 | production/8-check-gaID | time="2023-11-06T09:33:47Z" level=info msg="Success!" 
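Step 7 only patches the Jaeger CR, so step 8's first polls at 09:33:39 still see the old UI: the operator has to roll the production-ui-query deployment (the ReplicaSet churn visible in the events below) before the page embeds MyTrackingId, which request number 2 then finds at 09:33:47. The actual add-tracking-id.yaml is not reproduced in this log; a plausible shape for it, using the Jaeger UI tracking option (field names assumed, verify against the test repo):

# Hypothetical: what add-tracking-id.yaml likely sets (illustrative only).
cat <<'EOF' | kubectl apply -n "$NAMESPACE" -f -
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: production-ui
spec:
  ui:
    options:
      tracking:
        gaID: MyTrackingId   # surfaced in the /search HTML once rolled out
EOF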
logger.go:42: 09:33:47 | production/8-check-gaID | test step completed 8-check-gaID logger.go:42: 09:33:47 | production | production events from ns kuttl-test-immune-elk: logger.go:42: 09:33:47 | production | 2023-11-06 09:32:29 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestimmuneelkproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fb78 to 1 deployment-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:32:30 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fb78 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj replicaset-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:32:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj Binding Scheduled Successfully assigned kuttl-test-immune-elk/elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj to ip-10-0-52-106.ec2.internal default-scheduler logger.go:42: 09:33:47 | production | 2023-11-06 09:32:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj AddedInterface Add eth0 [10.128.2.109/23] from ovn-kubernetes logger.go:42: 09:33:47 | production | 2023-11-06 09:32:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel8@sha256:03c856ce4d0866bee1d1852e03c498d49dbd8804a0b511eff684d40cf104d80c" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel8@sha256:ec7cbd383df9895d80b0ffd0508dc21f7948e52e2c8d2b5171d118a7283c25e2" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:40 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:45 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestimmuneelkproductionui-1-6b7b48fblsfj.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:56 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-bwrff Binding Scheduled Successfully assigned kuttl-test-immune-elk/production-ui-collector-5b7c4bd9bb-bwrff 
to ip-10-0-91-180.ec2.internal default-scheduler logger.go:42: 09:33:47 | production | 2023-11-06 09:32:56 +0000 UTC Normal ReplicaSet.apps production-ui-collector-5b7c4bd9bb SuccessfulCreate Created pod: production-ui-collector-5b7c4bd9bb-bwrff replicaset-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:32:56 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-5b7c4bd9bb to 1 deployment-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:32:56 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p Binding Scheduled Successfully assigned kuttl-test-immune-elk/production-ui-query-d57f8bb8c-ggx2p to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:33:47 | production | 2023-11-06 09:32:56 +0000 UTC Normal ReplicaSet.apps production-ui-query-d57f8bb8c SuccessfulCreate Created pod: production-ui-query-d57f8bb8c-ggx2p replicaset-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:32:56 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-d57f8bb8c to 1 deployment-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:32:57 +0000 UTC Warning Pod production-ui-query-d57f8bb8c-ggx2p FailedMount MountVolume.SetUp failed for volume "production-ui-ui-oauth-proxy-tls" : secret "production-ui-ui-oauth-proxy-tls" not found kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:57 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p AddedInterface Add eth0 [10.129.2.83/23] from ovn-kubernetes logger.go:42: 09:33:47 | production | 2023-11-06 09:32:57 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Warning Pod production-ui-collector-5b7c4bd9bb-bwrff FailedMount MountVolume.SetUp failed for volume "production-ui-collector-tls-config-volume" : failed to sync secret cache: timed out waiting for the condition kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-bwrff AddedInterface Add eth0 [10.131.0.79/23] from ovn-kubernetes logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-bwrff.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-bwrff.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-bwrff.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:33:47 | 
production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:32:58 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:33:47 | production | 2023-11-06 09:33:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:33:47 | production | 2023-11-06 09:33:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 09:33:47 | production | 2023-11-06 09:33:13 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:13 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:13 +0000 UTC Normal Pod production-ui-query-d57f8bb8c-ggx2p.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:13 +0000 UTC Normal ReplicaSet.apps production-ui-query-d57f8bb8c SuccessfulDelete Deleted pod: production-ui-query-d57f8bb8c-ggx2p replicaset-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:13 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-d57f8bb8c to 0 from 1 deployment-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:14 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd Binding Scheduled Successfully 
assigned kuttl-test-immune-elk/production-ui-query-dcf6f8977-hktkd to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:33:47 | production | 2023-11-06 09:33:14 +0000 UTC Normal ReplicaSet.apps production-ui-query-dcf6f8977 SuccessfulCreate Created pod: production-ui-query-dcf6f8977-hktkd replicaset-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:14 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-dcf6f8977 to 1 deployment-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd AddedInterface Add eth0 [10.129.2.84/23] from ovn-kubernetes logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2eb3192f78339d2c43ecea0973a0ad828764bbbbece950a394ed3dafc5f71b39" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:15 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:29 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:29 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:29 +0000 UTC Normal Pod production-ui-query-dcf6f8977-hktkd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:29 +0000 UTC Normal ReplicaSet.apps production-ui-query-dcf6f8977 SuccessfulDelete Deleted pod: 
production-ui-query-dcf6f8977-hktkd replicaset-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:29 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-dcf6f8977 to 0 from 1 deployment-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:30 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j Binding Scheduled Successfully assigned kuttl-test-immune-elk/production-ui-query-7774465c69-vqs9j to ip-10-0-45-129.ec2.internal default-scheduler logger.go:42: 09:33:47 | production | 2023-11-06 09:33:30 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j AddedInterface Add eth0 [10.129.2.85/23] from ovn-kubernetes logger.go:42: 09:33:47 | production | 2023-11-06 09:33:30 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:30 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:30 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:30 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:30 +0000 UTC Normal ReplicaSet.apps production-ui-query-7774465c69 SuccessfulCreate Created pod: production-ui-query-7774465c69-vqs9j replicaset-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:30 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-7774465c69 to 1 deployment-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:31 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:31 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:40 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:40 +0000 UTC Normal Pod production-ui-query-7774465c69-vqs9j.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 09:33:47 | production | 2023-11-06 09:33:40 +0000 UTC Normal ReplicaSet.apps production-ui-query-7774465c69 SuccessfulDelete Deleted pod: production-ui-query-7774465c69-vqs9j replicaset-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:40 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-7774465c69 to 0 from 1 deployment-controller logger.go:42: 09:33:47 | production | 2023-11-06 09:33:41 +0000 UTC Normal Pod production-ui-query-84cfb6c8d6-kj9z2 Binding Scheduled Successfully assigned 
kuttl-test-immune-elk/production-ui-query-84cfb6c8d6-kj9z2 to ip-10-0-45-129.ec2.internal default-scheduler
logger.go:42: 09:33:47 | production | 2023-11-06 09:33:41 +0000 UTC Normal Pod production-ui-query-84cfb6c8d6-kj9z2 AddedInterface Add eth0 [10.129.2.86/23] from ovn-kubernetes
logger.go:42: 09:33:47 | production | 2023-11-06 09:33:41 +0000 UTC Normal Pod production-ui-query-84cfb6c8d6-kj9z2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 09:33:47 | production | 2023-11-06 09:33:41 +0000 UTC Normal Pod production-ui-query-84cfb6c8d6-kj9z2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 09:33:47 | production | 2023-11-06 09:33:41 +0000 UTC Normal Pod production-ui-query-84cfb6c8d6-kj9z2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 09:33:47 | production | 2023-11-06 09:33:41 +0000 UTC Normal Pod production-ui-query-84cfb6c8d6-kj9z2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 09:33:47 | production | 2023-11-06 09:33:41 +0000 UTC Normal ReplicaSet.apps production-ui-query-84cfb6c8d6 SuccessfulCreate Created pod: production-ui-query-84cfb6c8d6-kj9z2 replicaset-controller
logger.go:42: 09:33:47 | production | 2023-11-06 09:33:41 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-84cfb6c8d6 to 1 deployment-controller
logger.go:42: 09:33:47 | production | 2023-11-06 09:33:42 +0000 UTC Normal Pod production-ui-query-84cfb6c8d6-kj9z2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 09:33:47 | production | 2023-11-06 09:33:42 +0000 UTC Normal Pod production-ui-query-84cfb6c8d6-kj9z2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 09:33:47 | production | Deleting namespace: kuttl-test-immune-elk
=== CONT  kuttl/harness/artifacts
logger.go:42: 09:33:54 | artifacts | Creating namespace: kuttl-test-clever-mole
logger.go:42: 09:33:54 | artifacts | artifacts events from ns kuttl-test-clever-mole:
logger.go:42: 09:33:54 | artifacts | Deleting namespace: kuttl-test-clever-mole
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (134.60s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/allinone (38.72s)
        --- PASS: kuttl/harness/production (90.07s)
        --- PASS: kuttl/harness/artifacts (5.77s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml
time="2023-11-06T09:34:00Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-06T09:34:00Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-06T09:34:00Z" level=debug msg="normalizing test case names"
time="2023-11-06T09:34:00Z" level=debug msg="ui/allinone -> ui_allinone"
time="2023-11-06T09:34:00Z" level=debug msg="ui/production -> ui_production"
time="2023-11-06T09:34:00Z" level=debug msg="ui/artifacts -> ui_artifacts"
+---------------+--------+
|     NAME      | RESULT |
+---------------+--------+
| ui_allinone   | passed |
| ui_production | passed |
| ui_artifacts  | passed |
+---------------+--------+
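junitcli post-processes each kuttl-report.xml before it lands in /logs/artifacts: the suite is renamed after the e2e suite being run, the synthetic artifacts TestCase is removed from the generated report (it still appears in the printed table), and slashes and dashes in case names become underscores so downstream tooling gets stable identifiers. A one-liner that mimics the renaming visible in the debug output above (illustrative, not the tool's actual code):

# e.g. ui/allinone -> ui_allinone, streaming/streaming-with-tls -> streaming_streaming_with_tls
printf '%s\n' ui/allinone ui/production streaming/streaming-with-tls | sed 's#[/-]#_#g'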
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/ui.xml
+ '[' 0 -gt 0 ']'
+ '[' 2 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=upgrade
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/upgrade.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-upgrade
make[2]: Entering directory '/tmp/jaeger-tests'
make docker JAEGER_VERSION=1.49.1 IMG="quay.io//jaeger-operator:next"
make[3]: Entering directory '/tmp/jaeger-tests'
[ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.49.0" --build-arg=JAEGER_VERSION=1.49.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2023-11-06T09:34:00Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" .
make[3]: Leaving directory '/tmp/jaeger-tests'
touch build-e2e-upgrade-image
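The docker step above never actually builds: the first operand of || is the non-empty literal "true" (presumably injected by the CI job to say a prebuilt operator image is available), so the test succeeds and the build command is short-circuited away. The same idiom in isolation, with a hypothetical SKIP_BUILD variable standing in for that literal:

    # Build only when SKIP_BUILD is empty; any non-empty value skips the build.
    SKIP_BUILD=true
    [ ! -z "$SKIP_BUILD" ] || docker build -t quay.io/example/jaeger-operator:next .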
SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.49.0" JAEGER_OPERATOR_VERSION="1.49.0" JAEGER_VERSION="1.49.0" ./tests/e2e/upgrade/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-11-04-120954 True False 79m Cluster version is 4.15.0-0.nightly-2023-11-04-120954'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-11-04-120954 True False 79m Cluster version is 4.15.0-0.nightly-2023-11-04-120954' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/upgrade/render.sh
++ export SUITE_DIR=./tests/e2e/upgrade
++ SUITE_DIR=./tests/e2e/upgrade
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/upgrade
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
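render.sh decides it is running against OpenShift by querying the ClusterVersion resource, which exists only on OpenShift clusters; any non-empty output flips IS_OPENSHIFT to true, exactly as the trace above shows. A condensed sketch of that check:

    # OpenShift detection as traced above: `kubectl get clusterversion`
    # produces output only when the OpenShift ClusterVersion API is present.
    IS_OPENSHIFT=false
    output=$(kubectl get clusterversion 2>/dev/null)
    if [ ! -z "$output" ]; then
        IS_OPENSHIFT=true
    fi
    export IS_OPENSHIFT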
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ export JAEGER_NAME
+ '[' true = true ']'
+ skip_test upgrade 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade
+ warning 'upgrade: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m'
WAR: upgrade: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test upgrade-from-latest-release 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade-from-latest-release
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade-from-latest-release
+ warning 'upgrade-from-latest-release: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m'
WAR: upgrade-from-latest-release: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running upgrade E2E tests'
Running upgrade E2E tests
+ cd tests/e2e/upgrade/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-1891468343
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
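With both upgrade tests removed by skip_test, the rendered _build directory holds only the generated kuttl-test.yaml and the artifacts placeholder, and kuttl is invoked directly against it. To reproduce this run by hand, a sketch using the paths taken from the trace above (your kubeconfig path will differ):

    # Run the rendered upgrade suite the same way the harness does;
    # --report xml writes kuttl-report.xml for the junitcli step that follows.
    cd tests/e2e/upgrade/_build
    KUBECONFIG=/tmp/kubeconfig-1891468343 /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml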
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-4qrjzj65-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 420 seconds for each step
harness.go:372: testsuite: . has 1 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT kuttl/harness/artifacts
logger.go:42: 09:34:01 | artifacts | Creating namespace: kuttl-test-apparent-leech
logger.go:42: 09:34:01 | artifacts | artifacts events from ns kuttl-test-apparent-leech:
logger.go:42: 09:34:01 | artifacts | Deleting namespace: kuttl-test-apparent-leech
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (5.80s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (5.75s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2023-11-06T09:34:07Z" level=debug msg="Setting a new name for the test suites"
time="2023-11-06T09:34:07Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-11-06T09:34:07Z" level=debug msg="normalizing test case names"
time="2023-11-06T09:34:07Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
+ count=0
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/elasticsearch.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/examples.xml
+ '[' 1 -gt 0 ']'
+ count=1
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/generate.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/miscellaneous.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/sidecar.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/streaming.xml
+ '[' 3 -gt 0 ']'
+ count=2
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/ui.xml
+ '[' 0 -gt 0 ']'
+ for file in $ARTIFACT_DIR/*
++ grep -c 'failure message' /logs/artifacts/upgrade.xml
+ '[' 0 -gt 0 ']'
+ '[' 2 -gt 3 ']'
+ exit 0
make[1]: Leaving directory '/tmp/jaeger-tests'
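The final gate counts failing suites, not failing tests: grep -c reports how many 'failure message' occurrences each suite XML contains, but count is incremented at most once per file. Here examples.xml (1 failure) and streaming.xml (3 failures) yield count=2, and since 2 is not greater than 3 the job still exits 0. A condensed sketch of that gate, assuming ARTIFACT_DIR=/logs/artifacts as in the trace:

    # Fail the job only when more than three suite reports contain failures.
    count=0
    for file in "$ARTIFACT_DIR"/*; do
        if [ "$(grep -c 'failure message' "$file")" -gt 0 ]; then
            count=$((count + 1))
        fi
    done
    [ "$count" -gt 3 ] && exit 1
    exit 0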