Installing kuttl
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64
KUBECONFIG file is: /tmp/kubeconfig-3803946269
for suite in elasticsearch examples generate miscellaneous sidecar streaming ui upgrade; do \
make run-e2e-tests-$suite ; \
done
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=elasticsearch
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/elasticsearch.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-elasticsearch
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
SKIP_ES_EXTERNAL=true \
KAFKA_VERSION=3.6.0 \
SKIP_KAFKA=false \
./tests/e2e/elasticsearch/render.sh
+++ kubectl get clusterversion
++ output='NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.15.0-0.nightly-2023-12-17-173511   True        False         7m41s   Cluster version is 4.15.0-0.nightly-2023-12-17-173511'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.15.0-0.nightly-2023-12-17-173511   True        False         7m41s   Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
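The probe above is what flips the render into OpenShift mode: render.sh captures the output of `kubectl get clusterversion` and treats any non-empty result as "this is OpenShift", which then gates every openshift/ template used below. A minimal sketch of that logic, reconstructed from the trace (`warning` is the suite's own colored-echo helper; the stderr redirect is an assumption about how the non-OpenShift case stays quiet):

#!/usr/bin/env bash
# Sketch: detect OpenShift by probing for the ClusterVersion API.
# On plain Kubernetes the resource does not exist, so output stays empty.
IS_OPENSHIFT=false
output=$(kubectl get clusterversion 2>/dev/null)
if [ ! -z "$output" ]; then
  warning 'Generating templates for an OpenShift cluster'
  IS_OPENSHIFT=true
fi
export IS_OPENSHIFT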
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/elasticsearch/render.sh
++ export SUITE_DIR=./tests/e2e/elasticsearch
++ SUITE_DIR=./tests/e2e/elasticsearch
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!'
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + start_test es-from-aio-to-production + '[' 1 -ne 1 ']' + test_name=es-from-aio-to-production + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-from-aio-to-production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m' Rendering files for test es-from-aio-to-production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-from-aio-to-production + cd es-from-aio-to-production + jaeger_name=my-jaeger + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 03 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=03 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i 
'.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml
+ render_smoke_test my-jaeger true 04
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=04
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test es-increasing-replicas
+ '[' 1 -ne 1 ']'
+ test_name=es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-increasing-replicas'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m'
Rendering files for test es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production
+ '[' es-from-aio-to-production '!=' _build ']'
+ cd ..
+ mkdir -p es-increasing-replicas
+ cd es-increasing-replicas
+ jaeger_name=simple-prod
+ '[' true = true ']'
+ jaeger_deployment_mode=production_autoprovisioned
+ render_install_jaeger simple-prod production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ cp ./01-install.yaml ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml
+ cp ./01-assert.yaml ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml
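Note how the es-increasing-replicas steps are derived rather than hand-written: step 01 is copied and patched in place with yq, and the kuttl assert file gets the mirrored patch so the step only passes once the scaled Deployments are actually ready. A sketch of the pattern, assuming yq v4 (`yq e -i`) and kuttl's NN-install.yaml/NN-assert.yaml step convention:

# Derive step 02 from step 01 and raise the replica counts on the Jaeger CR.
cp ./01-install.yaml ./02-install.yaml
yq e -i '.spec.collector.replicas=2' ./02-install.yaml
yq e -i '.spec.query.replicas=2' ./02-install.yaml

# Mirror the change in the assert: kuttl blocks on status.readyReplicas,
# i.e. on observed state, not just on the spec being accepted.
cp ./01-assert.yaml ./02-assert.yaml
yq e -i '.spec.replicas=2' ./02-assert.yaml
yq e -i '.status.readyReplicas=2' ./02-assert.yaml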
+ render_smoke_test simple-prod true 03
+ '[' 3 -ne 3 ']'
+ jaeger=simple-prod
+ is_secured=true
+ test_step=03
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ cp ./02-install.yaml ./04-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml
+ '[' true = true ']'
+ skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas
+ '[' es-increasing-replicas '!=' _build ']'
+ cd ..
+ rm -rf es-index-cleaner-upstream
+ warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m'
WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true
+ '[' true = true ']'
+ es_index_cleaner -autoprov production_autoprovisioned
+ '[' 2 -ne 2 ']'
+ postfix=-autoprov
+ jaeger_deployment_strategy=production_autoprovisioned
+ start_test es-index-cleaner-autoprov
+ '[' 1 -ne 1 ']'
+ test_name=es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-index-cleaner-autoprov'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m'
Rendering files for test es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-index-cleaner-autoprov
+ cd es-index-cleaner-autoprov
+ jaeger_name=test-es-index-cleaner-with-prefix
+ cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner
+ secured_es_connection=false
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_managed_es ']'
+ ELASTICSEARCH_URL=https://elasticsearch
+ secured_es_connection=true
+ cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md .
+ render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml + render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02 + '[' 6 -ne 6 ']' + jaeger=test-es-index-cleaner-with-prefix + is_secured=true + number_of_spans=5 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=5 + DAYS=5 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + sed 's~enabled: false~enabled: true~gi' ./01-install.yaml + CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml + /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=00 + test_step=06 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', 
'\''--assert-count-indices'\'', '\''0'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=test-es-index-cleaner-with-prefix-curator
+ JOB_NUMBER=00
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml
+ JOB_NUMBER=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml
+ '[' true = true ']'
+ get_elasticsearch_openshift_operator_version
+ export ESO_OPERATOR_VERSION
+ '[' true = true ']'
++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
+ properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.1"}}]}'
+ '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.1"}}]}' ']'
++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.1"}}]}'
++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
+ ESO_OPERATOR_VERSION=5.8.1
++ version_ge 5.8.1 5.4
+++ echo 5.8.1 5.4
+++ tr ' ' '\n'
+++ sort -rV
+++ head -n 1
++ test 5.8.1 == 5.8.1
+ '[' -n '' ']'
+ skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov
+ '[' es-index-cleaner-autoprov '!=' _build ']'
+ cd ..
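Both version gates in this render ride on GNU sort's -V (version sort): `version_le 3.6.0 0.25.0` earlier for the Kafka PodSet decision, `version_ge 5.8.1 5.4` here for the Elasticsearch OpenShift operator. A reconstruction sketched from the commands the trace echoes; the function names appear in the trace, the bodies are inferred from it:

# version_le A B: true when A <= B, i.e. A sorts first in ascending version order.
version_le() {
  test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
}

# version_ge A B: true when A >= B, i.e. A sorts first in descending version order.
version_ge() {
  test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" == "$1"
}

Here the 5.4 gate itself passes (5.8.1 >= 5.4); the skip that follows comes from the separate '[' -n '' ']' guard visible in the trace.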
+ rm -rf es-index-cleaner-managed
+ warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ start_test es-multiinstance
+ '[' 1 -ne 1 ']'
+ test_name=es-multiinstance
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-multiinstance'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m'
Rendering files for test es-multiinstance
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-multiinstance
+ cd es-multiinstance
+ jaeger_name=instance-1
+ render_install_jaeger instance-1 production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=instance-1
+ JAEGER_NAME=instance-1
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml
+ '[' true = true ']'
+ skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance
+ '[' es-multiinstance '!=' _build ']'
+ cd ..
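start_test and skip_test are the scaffolding this whole render phase hangs on: every test renders into _build/<test_name>, and skipping a test simply removes its directory so kuttl never discovers it. A sketch reconstructed from the traced commands (info and warning are the suite's colored-echo helpers; the argument-count checks are elided):

start_test() {
  test_name=$1
  echo ===========================================================================
  info "Rendering files for test $test_name"
  echo ===========================================================================
  # Step back up to _build if we are still inside the previous test's folder.
  if [ "$(basename "$(pwd)")" != _build ]; then cd ..; fi
  mkdir -p "$test_name"
  cd "$test_name"
}

skip_test() {
  test_name=$1
  message=$2
  if [ "$(basename "$(pwd)")" != _build ]; then cd ..; fi
  rm -rf "$test_name"
  warning "$test_name: $message"
}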
+ rm -rf es-rollover-upstream + warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_rollover -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-rollover-autoprov + '[' 1 -ne 1 ']' + test_name=es-rollover-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-rollover-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m' Rendering files for test es-rollover-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-rollover-autoprov + cd es-rollover-autoprov + cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md . + jaeger_name=my-jaeger + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_report_spans my-jaeger true 2 00 true 02 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset 
JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=00 + test_step=03 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=01 + test_step=04 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=01 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml + JOB_NUMBER=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml + render_report_spans my-jaeger true 2 02 true 06 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=02 + ensure_reported_spans=true + test_step=06 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=02 + JOB_NUMBER=02 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export 
JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=02 + test_step=07 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=02 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml + JOB_NUMBER=02 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + job_number=03 + test_step=08 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=03 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml + JOB_NUMBER=03 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + job_number=04 + 
test_step=09 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=04 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml + JOB_NUMBER=04 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml + render_report_spans my-jaeger true 2 03 true 10 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=03 + ensure_reported_spans=true + test_step=10 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=03 + JOB_NUMBER=03 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + CRONJOB_NAME=my-jaeger-es-rollover + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'',' + job_number=05 + test_step=11 + escape_command ''\''--name'\'', '\''jaeger-span-000002'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-000002'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-000002'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=05 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml + JOB_NUMBER=05 + /tmp/jaeger-tests/bin/gomplate -f 
/tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + job_number=06 + test_step=12 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=06 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml + JOB_NUMBER=06 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.1"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.1"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.1"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.8.1 ++ version_ge 5.8.1 5.4 +++ echo 5.8.1 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.8.1 == 5.8.1 + '[' -n '' ']' + skip_test es-rollover-managed 'Test only supported with Elasticsearch OpenShift 
Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov
+ '[' es-rollover-autoprov '!=' _build ']'
+ cd ..
+ rm -rf es-rollover-managed
+ warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ skip_test es-spark-dependencies 'This test is not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=es-spark-dependencies
+ message='This test is not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ rm -rf es-spark-dependencies
+ warning 'es-spark-dependencies: This test is not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m'
WAR: es-spark-dependencies: This test is not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running elasticsearch E2E tests'
Running elasticsearch E2E tests
+ cd tests/e2e/elasticsearch/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3803946269
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
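At this point rendering is finished and the suite simply points kuttl at the _build tree. A sketch of the hand-off as the trace shows it; the set +e is presumably there so a failing run still lets the caller collect the XML report from the reports directory (that part is an assumption, the trace does not show the report handling):

cd tests/e2e/elasticsearch/_build
set +e
# kuttl discovers one test per subdirectory and runs the numbered
# NN-*.yaml steps inside it; --report xml emits a JUnit-style report.
KUBECONFIG=/tmp/kubeconfig-3803946269 kubectl-kuttl test --report xml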
=== RUN   kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 600 seconds for each step
    harness.go:372: testsuite: . has 7 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN   kuttl/harness/es-from-aio-to-production
=== PAUSE kuttl/harness/es-from-aio-to-production
=== RUN   kuttl/harness/es-increasing-replicas
=== PAUSE kuttl/harness/es-increasing-replicas
=== RUN   kuttl/harness/es-index-cleaner-autoprov
=== PAUSE kuttl/harness/es-index-cleaner-autoprov
=== RUN   kuttl/harness/es-multiinstance
=== PAUSE kuttl/harness/es-multiinstance
=== RUN   kuttl/harness/es-rollover-autoprov
=== PAUSE kuttl/harness/es-rollover-autoprov
=== RUN   kuttl/harness/es-simple-prod
=== PAUSE kuttl/harness/es-simple-prod
=== CONT  kuttl/harness/artifacts
logger.go:42: 06:46:08 | artifacts | Creating namespace: kuttl-test-fluent-ape
logger.go:42: 06:46:08 | artifacts | artifacts events from ns kuttl-test-fluent-ape:
logger.go:42: 06:46:08 | artifacts | Deleting namespace: kuttl-test-fluent-ape
=== CONT  kuttl/harness/es-multiinstance
logger.go:42: 06:46:14 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:46:14 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:46:14 | es-multiinstance | Creating namespace: kuttl-test-tender-seasnail
logger.go:42: 06:46:14 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace
logger.go:42: 06:46:14 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true]
logger.go:42: 06:46:14 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace
logger.go:42: 06:46:14 | es-multiinstance/1-install | starting test step 1-install
logger.go:42: 06:46:14 | es-multiinstance/1-install | Jaeger:kuttl-test-tender-seasnail/instance-1 created
logger.go:42: 06:47:09 | es-multiinstance/1-install | test step completed 1-install
logger.go:42: 06:47:09 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace
logger.go:42: 06:47:09 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test]
logger.go:42: 06:47:09 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created
logger.go:42: 06:47:09 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace
logger.go:42: 06:47:09 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance
logger.go:42: 06:47:09 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test]
logger.go:42: 06:47:11 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created
logger.go:42: 06:47:11 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000]
logger.go:42: 06:48:00 | es-multiinstance/3-create-second-instance | assert is valid
logger.go:42: 06:48:00 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance
logger.go:42: 06:48:00 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets
logger.go:42: 06:48:00 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1]
logger.go:42: 06:48:00 | es-multiinstance/4-check-secrets |
running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2] logger.go:42: 06:48:01 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0] logger.go:42: 06:48:01 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets logger.go:42: 06:48:01 | es-multiinstance/5-delete | starting test step 5-delete logger.go:42: 06:48:01 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false] logger.go:42: 06:48:01 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted logger.go:42: 06:48:01 | es-multiinstance/5-delete | test step completed 5-delete logger.go:42: 06:48:01 | es-multiinstance | es-multiinstance events from ns kuttl-test-tender-seasnail: logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9 Binding Scheduled Successfully assigned kuttl-test-tender-seasnail/elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:20 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c86f454c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9 replicaset-controller logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:20 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesttenderseasnailinstance1-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c86f454c to 1 deployment-controller logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9 AddedInterface Add eth0 [10.129.2.14/23] from ovn-kubernetes logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9.spec.containers{elasticsearch} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" in 6.367s (6.367s including waiting) kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 
06:46:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" in 2.641s (2.641s including waiting) kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:41 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesttenderseasnailinstance1-1-64c868g6r9.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal Pod instance-1-collector-d8c58ddfd-76tbp Binding Scheduled Successfully assigned kuttl-test-tender-seasnail/instance-1-collector-d8c58ddfd-76tbp to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal Pod instance-1-collector-d8c58ddfd-76tbp AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal Pod instance-1-collector-d8c58ddfd-76tbp.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal ReplicaSet.apps instance-1-collector-d8c58ddfd SuccessfulCreate Created pod: instance-1-collector-d8c58ddfd-76tbp replicaset-controller logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal Deployment.apps instance-1-collector ScalingReplicaSet Scaled up replica set instance-1-collector-d8c58ddfd to 1 deployment-controller logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95 Binding Scheduled Successfully assigned kuttl-test-tender-seasnail/instance-1-query-5fd7b7bdd8-lgw95 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95 AddedInterface Add eth0 [10.128.2.20/23] from ovn-kubernetes logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal ReplicaSet.apps instance-1-query-5fd7b7bdd8 SuccessfulCreate Created pod: instance-1-query-5fd7b7bdd8-lgw95 replicaset-controller logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:52 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-5fd7b7bdd8 to 1 deployment-controller logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:54 +0000 UTC Normal Pod instance-1-collector-d8c58ddfd-76tbp.spec.containers{jaeger-collector} Pulled Successfully pulled image 
"registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" in 1.666s (1.666s including waiting) kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:54 +0000 UTC Normal Pod instance-1-collector-d8c58ddfd-76tbp.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:54 +0000 UTC Normal Pod instance-1-collector-d8c58ddfd-76tbp.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:58 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" in 5.514s (5.514s including waiting) kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:58 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:58 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:46:58 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:03 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" in 5.069s (5.069s including waiting) kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:03 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:03 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:03 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:06 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" in 2.55s (2.55s including waiting) kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:06 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:06 +0000 UTC Normal Pod instance-1-query-5fd7b7bdd8-lgw95.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:08 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric 
failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:54 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:54 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod instance-1-collector-d8c58ddfd-76tbp horizontal-pod-autoscaler logger.go:42: 06:48:01 | es-multiinstance | 2023-12-18 06:47:54 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:48:01 | es-multiinstance | Deleting namespace: kuttl-test-tender-seasnail === CONT kuttl/harness/es-simple-prod logger.go:42: 06:48:08 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 06:48:08 | es-simple-prod | Creating namespace: kuttl-test-climbing-goose logger.go:42: 06:48:09 | es-simple-prod | es-simple-prod events from ns kuttl-test-climbing-goose: logger.go:42: 06:48:09 | es-simple-prod | Deleting namespace: kuttl-test-climbing-goose === CONT kuttl/harness/es-rollover-autoprov logger.go:42: 06:48:15 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 06:48:15 | es-rollover-autoprov | Creating namespace: kuttl-test-certain-ibex logger.go:42: 06:48:15 | es-rollover-autoprov/1-install | starting test step 1-install logger.go:42: 06:48:15 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-certain-ibex/my-jaeger created logger.go:42: 06:48:50 | es-rollover-autoprov/1-install | test step completed 1-install logger.go:42: 06:48:50 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans logger.go:42: 06:48:50 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
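The FailedGetResourceMetric and FailedComputeMetricsReplicas warnings above are routine at this point in a run: the collector's HPA queries the resource metrics API before metrics-server has any samples for the freshly scheduled pod, and the jaeger-collector container declares no memory request, so memory utilization can never be computed for it. A minimal sketch of the kind of patch that would address the memory half, assuming a Jaeger CR named instance-1; the request values are illustrative placeholders, not the test's actual configuration:

# Hedged sketch: give the collector explicit resource requests so the HPA
# can compute utilization. The values below are placeholders, not from this run.
kubectl patch jaeger instance-1 -n "$NAMESPACE" --type merge -p '
spec:
  collector:
    resources:
      requests:
        cpu: 100m
        memory: 128Mi
'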
logger.go:42: 06:48:52 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 06:48:59 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml] logger.go:42: 06:49:00 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE] logger.go:42: 06:49:00 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created logger.go:42: 06:49:26 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans logger.go:42: 06:49:26 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices logger.go:42: 06:49:26 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-certain-ibex/00-check-indices created logger.go:42: 06:49:30 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices logger.go:42: 06:49:30 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices logger.go:42: 06:49:30 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-certain-ibex/01-check-indices created logger.go:42: 06:49:34 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices logger.go:42: 06:49:34 | es-rollover-autoprov/5-install | starting test step 5-install logger.go:42: 06:49:34 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-certain-ibex/my-jaeger updated logger.go:42: 06:49:46 | es-rollover-autoprov/5-install | test step completed 5-install logger.go:42: 06:49:46 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans logger.go:42: 06:49:46 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 06:49:55 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml] logger.go:42: 06:49:56 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE] logger.go:42: 06:49:56 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created logger.go:42: 06:50:20 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans logger.go:42: 06:50:20 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices logger.go:42: 06:50:20 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-certain-ibex/02-check-indices created logger.go:42: 06:50:24 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices logger.go:42: 06:50:24 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices
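Each report-spans step above follows the same pattern: gomplate renders tests/templates/report-spans.yaml.template into a numbered Job manifest from exported variables, and kubectl apply submits it to the test namespace. A rough reconstruction of step 2, assuming the template pulls these values from the environment (gomplate's .Env); ASSERT_IMG, the CI pipeline image digest shown in the log, is left out here rather than repeated:

export DAYS=2 JOB_NUMBER=00 MOUNT_SECRET=e2e-test
export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
# render the Job manifest from the template, then submit it
/tmp/jaeger-tests/bin/gomplate \
  -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template \
  -o "report-span-${JOB_NUMBER}-job.yaml"
kubectl apply -f "report-span-${JOB_NUMBER}-job.yaml" -n "$NAMESPACE"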
logger.go:42: 06:50:24 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-certain-ibex/03-check-indices created logger.go:42: 06:50:28 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices logger.go:42: 06:50:28 | es-rollover-autoprov/9-check-indices | starting test step 9-check-indices logger.go:42: 06:50:28 | es-rollover-autoprov/9-check-indices | Job:kuttl-test-certain-ibex/04-check-indices created logger.go:42: 06:50:32 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices logger.go:42: 06:50:32 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans logger.go:42: 06:50:32 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 06:50:40 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml] logger.go:42: 06:50:40 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE] logger.go:42: 06:50:41 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created logger.go:42: 06:51:04 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans logger.go:42: 06:51:04 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices logger.go:42: 06:51:04 | es-rollover-autoprov/11-check-indices | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE] logger.go:42: 06:51:15 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:15Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists" logger.go:42: 06:51:15 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:15Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 06:51:15 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:15Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully" logger.go:42: 06:51:15 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:15Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob" logger.go:42: 06:51:15 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:15Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:51:15 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:15Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:51:25 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:25Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:51:35 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:35Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:51:45 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:45Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed"
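The wait-cronjob helper driving the lines above (cmd-utils/wait-cronjob/main.go) first checks that the my-jaeger-es-rollover CronJob exists, then blocks until the next Job it spawns finishes successfully, polling every ten seconds; that polling is what produces the repeated "Waiting for next job" messages. A shell approximation of the same wait, offered as a sketch rather than the helper's actual logic:

cronjob=my-jaeger-es-rollover
job=""
# CronJob children are named <cronjob>-<scheduled-minute>, so wait for a
# numerically suffixed Job to appear...
until [ -n "$job" ]; do
  sleep 10
  job=$(kubectl get jobs -n "$NAMESPACE" -o name | grep -E "^job.batch/${cronjob}-[0-9]+$" | sort | tail -n 1)
done
# ...then block until that Job reports the Complete condition
kubectl wait --for=condition=complete --timeout=300s "$job" -n "$NAMESPACE"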
logger.go:42: 06:51:55 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:51:55Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:52:05 | es-rollover-autoprov/11-check-indices | time="2023-12-18T06:52:05Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after my-jaeger-es-rollover 50.035753798s" logger.go:42: 06:52:05 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-certain-ibex/05-check-indices created logger.go:42: 06:52:09 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices logger.go:42: 06:52:09 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices logger.go:42: 06:52:09 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-certain-ibex/06-check-indices created logger.go:42: 06:52:13 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices logger.go:42: 06:52:13 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-certain-ibex: logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:21 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6 replicaset-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6 Binding Scheduled Successfully assigned kuttl-test-certain-ibex/elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6 AddedInterface Add eth0 [10.129.2.15/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:21 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcertainibexmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988c to 1 deployment-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:22 +0000 UTC Normal Pod
elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:31 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:37 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcertainibexmyjaeger-1-6f65c988cfw7t6.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-collector-bb44fdcd8-fksqq Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-collector-bb44fdcd8-fksqq to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-collector-bb44fdcd8-fksqq AddedInterface Add eth0 [10.128.2.23/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-collector-bb44fdcd8-fksqq.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-collector-bb44fdcd8-fksqq.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-collector-bb44fdcd8-fksqq.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-bb44fdcd8 SuccessfulCreate Created pod: my-jaeger-collector-bb44fdcd8-fksqq replicaset-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-bb44fdcd8 to 1 deployment-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-query-75d569f8d9-r5zqc to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc AddedInterface Add eth0 [10.128.2.24/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 
06:48:48 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-75d569f8d9 SuccessfulCreate Created pod: my-jaeger-query-75d569f8d9-r5zqc replicaset-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:48 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-75d569f8d9 to 1 deployment-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:49 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:49 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:54 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:54 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:54 +0000 UTC Normal Pod my-jaeger-query-75d569f8d9-r5zqc.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:54 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-75d569f8d9 SuccessfulDelete Deleted pod: my-jaeger-query-75d569f8d9-r5zqc replicaset-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:54 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-75d569f8d9 to 0 from 1 deployment-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:55 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-query-7ccb689cc8-rbs7w to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:55 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w AddedInterface Add eth0 [10.128.2.25/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:55 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{jaeger-query} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:55 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:55 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:55 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:55 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7ccb689cc8 SuccessfulCreate Created pod: my-jaeger-query-7ccb689cc8-rbs7w replicaset-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:55 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7ccb689cc8 to 1 deployment-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:56 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:56 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:56 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:56 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:48:56 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:00 +0000 UTC Normal Pod 00-report-span-pztwd Binding Scheduled Successfully assigned kuttl-test-certain-ibex/00-report-span-pztwd to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:00 +0000 UTC Normal Pod 00-report-span-pztwd AddedInterface Add eth0 [10.131.0.29/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:00 +0000 UTC Normal Pod 00-report-span-pztwd.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:00 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-pztwd job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:02 +0000 UTC Normal Pod 00-report-span-pztwd.spec.containers{asserts-container} Pulled Successfully pulled image 
"registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" in 1.79s (1.79s including waiting) kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:02 +0000 UTC Normal Pod 00-report-span-pztwd.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:02 +0000 UTC Normal Pod 00-report-span-pztwd.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:26 +0000 UTC Normal Pod 00-check-indices-86t2g Binding Scheduled Successfully assigned kuttl-test-certain-ibex/00-check-indices-86t2g to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:26 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-86t2g job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:26 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:27 +0000 UTC Normal Pod 00-check-indices-86t2g AddedInterface Add eth0 [10.131.0.30/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:27 +0000 UTC Normal Pod 00-check-indices-86t2g.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:27 +0000 UTC Normal Pod 00-check-indices-86t2g.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:27 +0000 UTC Normal Pod 00-check-indices-86t2g.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:30 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:30 +0000 UTC Normal Pod 01-check-indices-n5xf9 Binding Scheduled Successfully assigned kuttl-test-certain-ibex/01-check-indices-n5xf9 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:30 
+0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-n5xf9 job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:31 +0000 UTC Normal Pod 01-check-indices-n5xf9 AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:31 +0000 UTC Normal Pod 01-check-indices-n5xf9.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:31 +0000 UTC Normal Pod 01-check-indices-n5xf9.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:31 +0000 UTC Normal Pod 01-check-indices-n5xf9.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:33 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:33 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-bb44fdcd8-fksqq horizontal-pod-autoscaler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:33 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:34 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:36 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-f55rk Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-es-rollover-create-mapping-f55rk to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:36 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-f55rk AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:36 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-f55rk.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:36 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-f55rk job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:42 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-f55rk.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image 
"registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" in 5.749s (5.749s including waiting) kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:42 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-f55rk.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:42 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-f55rk.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:45 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal Pod my-jaeger-collector-6448c666bf-qxnbn Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-collector-6448c666bf-qxnbn to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-6448c666bf SuccessfulCreate Created pod: my-jaeger-collector-6448c666bf-qxnbn replicaset-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal Pod my-jaeger-collector-bb44fdcd8-fksqq.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-bb44fdcd8 SuccessfulDelete Deleted pod: my-jaeger-collector-bb44fdcd8-fksqq replicaset-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-bb44fdcd8 to 0 from 1 deployment-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-6448c666bf to 1 deployment-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal Pod my-jaeger-query-7ccb689cc8-rbs7w.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7ccb689cc8 SuccessfulDelete Deleted pod: my-jaeger-query-7ccb689cc8-rbs7w replicaset-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:46 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-7ccb689cc8 to 0 from 1 deployment-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:47 +0000 UTC Normal Pod my-jaeger-collector-6448c666bf-qxnbn AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:47 +0000 UTC Normal Pod 
my-jaeger-collector-6448c666bf-qxnbn.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:47 +0000 UTC Normal Pod my-jaeger-collector-6448c666bf-qxnbn.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:47 +0000 UTC Normal Pod my-jaeger-collector-6448c666bf-qxnbn.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:47 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-query-587d56f77c-8vhlk to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:47 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-587d56f77c SuccessfulCreate Created pod: my-jaeger-query-587d56f77c-8vhlk replicaset-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:47 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-587d56f77c to 1 deployment-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Warning Endpoints my-jaeger-collector-headless FailedToUpdateEndpoint Failed to update endpoint kuttl-test-certain-ibex/my-jaeger-collector-headless: Operation cannot be fulfilled on endpoints "my-jaeger-collector-headless": the object has been modified; please apply your changes to the latest version and try again endpoint-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Warning Endpoints my-jaeger-collector FailedToUpdateEndpoint Failed to update endpoint kuttl-test-certain-ibex/my-jaeger-collector: Operation cannot be fulfilled on endpoints "my-jaeger-collector": the object has been modified; please apply your changes to the latest version and try again endpoint-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk AddedInterface Add eth0 [10.128.2.26/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod 
my-jaeger-query-587d56f77c-8vhlk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:48 +0000 UTC Normal Pod my-jaeger-query-587d56f77c-8vhlk.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:56 +0000 UTC Normal Pod 02-report-span-z9pz8 Binding Scheduled Successfully assigned kuttl-test-certain-ibex/02-report-span-z9pz8 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:56 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-z9pz8 job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:57 +0000 UTC Normal Pod 02-report-span-z9pz8 AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:57 +0000 UTC Normal Pod 02-report-span-z9pz8.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:57 +0000 UTC Normal Pod 02-report-span-z9pz8.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:49:57 +0000 UTC Normal Pod 02-report-span-z9pz8.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381370-bhbpw Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-es-lookback-28381370-bhbpw to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381370-bhbpw AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381370-bhbpw.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381370-bhbpw.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381370-bhbpw.spec.containers{my-jaeger-es-lookback} 
Started Started container my-jaeger-es-lookback kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28381370 SuccessfulCreate Created pod: my-jaeger-es-lookback-28381370-bhbpw job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28381370 cronjob-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381370-5msfr Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-es-rollover-28381370-5msfr to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381370-5msfr AddedInterface Add eth0 [10.128.2.27/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381370-5msfr.spec.containers{my-jaeger-es-rollover} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28381370 SuccessfulCreate Created pod: my-jaeger-es-rollover-28381370-5msfr job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28381370 cronjob-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28381370 Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28381370, status: Complete cronjob-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:03 +0000 UTC Normal Pod my-jaeger-es-rollover-28381370-5msfr.spec.containers{my-jaeger-es-rollover} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" in 3.03s (3.03s including waiting) kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:03 +0000 UTC Normal Pod my-jaeger-es-rollover-28381370-5msfr.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:03 +0000 UTC Normal Pod my-jaeger-es-rollover-28381370-5msfr.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:06 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28381370 Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:06 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28381370, status: Complete cronjob-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:20 +0000 UTC Normal Pod 02-check-indices-4f7xv Binding Scheduled Successfully assigned kuttl-test-certain-ibex/02-check-indices-4f7xv to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:20 
+0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-4f7xv job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:20 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:21 +0000 UTC Normal Pod 02-check-indices-4f7xv AddedInterface Add eth0 [10.131.0.36/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:21 +0000 UTC Normal Pod 02-check-indices-4f7xv.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:21 +0000 UTC Normal Pod 02-check-indices-4f7xv.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:21 +0000 UTC Normal Pod 02-check-indices-4f7xv.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:24 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:24 +0000 UTC Normal Pod 03-check-indices-fw8d6 Binding Scheduled Successfully assigned kuttl-test-certain-ibex/03-check-indices-fw8d6 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:24 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-fw8d6 job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:25 +0000 UTC Normal Pod 03-check-indices-fw8d6 AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:25 +0000 UTC Normal Pod 03-check-indices-fw8d6.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:25 +0000 UTC Normal Pod 03-check-indices-fw8d6.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:25 +0000 UTC Normal Pod 03-check-indices-fw8d6.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:28 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:28 +0000 UTC Normal Pod 04-check-indices-dp5rf Binding Scheduled Successfully assigned kuttl-test-certain-ibex/04-check-indices-dp5rf to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:28 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-dp5rf job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:29 +0000 UTC Normal Pod 04-check-indices-dp5rf AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:29 +0000 UTC Normal Pod 04-check-indices-dp5rf.spec.containers{asserts-container} 
Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:29 +0000 UTC Normal Pod 04-check-indices-dp5rf.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:29 +0000 UTC Normal Pod 04-check-indices-dp5rf.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:32 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:41 +0000 UTC Normal Pod 03-report-span-6zqh8 Binding Scheduled Successfully assigned kuttl-test-certain-ibex/03-report-span-6zqh8 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:41 +0000 UTC Normal Pod 03-report-span-6zqh8 AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:41 +0000 UTC Normal Pod 03-report-span-6zqh8.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:41 +0000 UTC Normal Pod 03-report-span-6zqh8.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:41 +0000 UTC Normal Pod 03-report-span-6zqh8.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:41 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-6zqh8 job-controller logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:50:48 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-6448c666bf-qxnbn horizontal-pod-autoscaler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381371-nl7vj Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-es-lookback-28381371-nl7vj to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381371-nl7vj AddedInterface Add eth0 [10.131.0.40/23] from ovn-kubernetes logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381371-nl7vj.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381371-nl7vj.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod 
my-jaeger-es-lookback-28381371-nl7vj.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28381371 SuccessfulCreate Created pod: my-jaeger-es-lookback-28381371-nl7vj job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28381371 cronjob-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381371-7pz9n Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-es-rollover-28381371-7pz9n to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381371-7pz9n AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381371-7pz9n.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381371-7pz9n.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381371-7pz9n.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28381371 SuccessfulCreate Created pod: my-jaeger-es-rollover-28381371-7pz9n job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28381371 cronjob-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28381371 Completed Job completed job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28381371, status: Complete cronjob-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28381371 Completed Job completed job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28381371, status: Complete cronjob-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:51:04 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381372-tgnzb Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-es-lookback-28381372-tgnzb to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381372-tgnzb AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381372-tgnzb.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381372-tgnzb.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28381372-tgnzb.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28381372 SuccessfulCreate Created pod: my-jaeger-es-lookback-28381372-tgnzb job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28381372 cronjob-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381372-mnmq2 Binding Scheduled Successfully assigned kuttl-test-certain-ibex/my-jaeger-es-rollover-28381372-mnmq2 to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381372-mnmq2 AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381372-mnmq2.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:011593d9ab270edcf57a484b7106c1682984a35131a92deabcb944aefb180619" already present on machine kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381372-mnmq2.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28381372-mnmq2.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28381372 SuccessfulCreate Created pod: my-jaeger-es-rollover-28381372-mnmq2 job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28381372 cronjob-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28381372 Completed Job completed job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28381372, status: Complete cronjob-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28381372 Completed Job completed job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28381372, status: Complete cronjob-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:05 +0000 UTC Normal Pod 05-check-indices-xdf4x Binding Scheduled Successfully assigned kuttl-test-certain-ibex/05-check-indices-xdf4x to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:05 +0000 UTC Normal Pod 05-check-indices-xdf4x AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:05 +0000 UTC Normal Pod 05-check-indices-xdf4x.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:05 +0000 UTC Normal Pod 05-check-indices-xdf4x.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:05 +0000 UTC Normal Pod 05-check-indices-xdf4x.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:05 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-xdf4x job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:08 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:09 +0000 UTC Normal Pod 06-check-indices-szxxd Binding Scheduled Successfully assigned kuttl-test-certain-ibex/06-check-indices-szxxd to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:09 +0000 UTC Normal Pod 06-check-indices-szxxd AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:09 +0000 UTC Normal Pod 06-check-indices-szxxd.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:09 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-szxxd job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:10 +0000 UTC Normal Pod 06-check-indices-szxxd.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:10 +0000 UTC Normal Pod 06-check-indices-szxxd.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:52:13 | es-rollover-autoprov | 2023-12-18 06:52:12 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller
logger.go:42: 06:52:13 | es-rollover-autoprov | Deleting namespace: kuttl-test-certain-ibex
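Before the next test starts, a note on the "Ignoring ..." messages kuttl prints below: only files whose names match the step regexp ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ are collected as numbered test steps, and everything else in the suite directory (README.md, helper scripts, templates) is skipped. A minimal bash sketch of that filter, using a POSIX-ERE translation of the regexp; this is illustrative only, not kuttl's actual implementation:

#!/usr/bin/env bash
# Approximate kuttl's step-file selection: names like "01-install.yaml"
# are steps, anything else is ignored (see the log messages below).
step_regex='^([0-9]+)-[^.]+(\.yaml)?$'
for f in README.md check-es-nodes.sh openshift-check-es-nodes.yaml.template 01-install.yaml; do
  if [[ $f =~ $step_regex ]]; then
    echo "step ${BASH_REMATCH[1]}: $f"
  else
    echo "Ignoring $f as it does not match file name regexp"
  fi
done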
=== CONT  kuttl/harness/es-increasing-replicas
logger.go:42: 06:52:22 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:52:22 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:52:22 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:52:22 | es-increasing-replicas | Creating namespace: kuttl-test-vital-tortoise
logger.go:42: 06:52:22 | es-increasing-replicas/1-install | starting test step 1-install
logger.go:42: 06:52:22 | es-increasing-replicas/1-install | Jaeger:kuttl-test-vital-tortoise/simple-prod created
logger.go:42: 06:52:57 | es-increasing-replicas/1-install | test step completed 1-install
logger.go:42: 06:52:57 | es-increasing-replicas/2-install | starting test step 2-install
logger.go:42: 06:52:57 | es-increasing-replicas/2-install | Jaeger:kuttl-test-vital-tortoise/simple-prod updated
logger.go:42: 06:53:11 | es-increasing-replicas/2-install | test step completed 2-install
logger.go:42: 06:53:11 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test
logger.go:42: 06:53:11 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 06:53:13 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 06:53:19 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 06:53:20 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 06:53:20 | es-increasing-replicas/3-smoke-test | job.batch/report-span created
logger.go:42: 06:53:20 | es-increasing-replicas/3-smoke-test | job.batch/check-span created
logger.go:42: 06:53:32 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test
logger.go:42: 06:53:32 | es-increasing-replicas/4-install | starting test step 4-install
logger.go:42: 06:53:32 | es-increasing-replicas/4-install | Jaeger:kuttl-test-vital-tortoise/simple-prod updated
logger.go:42: 06:53:32 | es-increasing-replicas/4-install | test step completed 4-install
logger.go:42: 06:53:32 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes
logger.go:42: 06:53:32 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE]
logger.go:42: 06:53:32 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 06:53:32 | es-increasing-replicas/5-check-es-nodes | false
logger.go:42: 06:53:32 | es-increasing-replicas/5-check-es-nodes | Error: no matches found
logger.go:42: 06:53:37 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 06:53:37 | es-increasing-replicas/5-check-es-nodes | true
logger.go:42: 06:53:37 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes
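The 3-smoke-test step above is plain templating plus kubectl: gomplate renders the report-span and check-span Jobs from smoke-test.yaml.template, driven by the collector and query endpoints passed as environment variables, and kuttl then asserts that both Jobs complete. A rough bash equivalent of that sequence, assuming the same template and substituting kubectl wait for kuttl's assert (ASSERT_IMG elided here):

#!/usr/bin/env bash
# Sketch of the smoke-test flow logged above (not the harness's exact code).
set -euo pipefail
NAMESPACE=${1:?usage: $0 <namespace>}

# Render the Job manifests from the template; the variables mirror the
# ones visible in the "running command" log line above.
JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 \
JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 \
MOUNT_SECRET=e2e-test \
/tmp/jaeger-tests/bin/gomplate \
  -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template \
  -o smoke-test-job.yaml

kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"

# kuttl asserts on the Jobs' completion; kubectl wait is a stand-in here.
kubectl wait --for=condition=complete --timeout=120s \
  job/report-span job/check-span -n "$NAMESPACE"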
logger.go:42: 06:53:37 | es-increasing-replicas | es-increasing-replicas events from ns kuttl-test-vital-tortoise:
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:27 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cd89df SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:27 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:27 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cd89df to 1 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m AddedInterface Add eth0 [10.129.2.16/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:38 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:43 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-1-cd7cdqjd5m.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:54 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-m842z Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-collector-c7b5b464f-m842z to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:54 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-c7b5b464f SuccessfulCreate Created pod: simple-prod-collector-c7b5b464f-m842z replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:54 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-c7b5b464f to 1 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:54 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-query-7797d6578f-n6z5f to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:54 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7797d6578f SuccessfulCreate Created pod: simple-prod-query-7797d6578f-n6z5f replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:54 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7797d6578f to 1 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-m842z AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-m842z.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-m842z.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-m842z.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f AddedInterface Add eth0 [10.128.2.30/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:55 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-xt6b7 Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-collector-c7b5b464f-xt6b7 to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-xt6b7 AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-xt6b7.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-xt6b7.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-xt6b7.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-c7b5b464f SuccessfulCreate Created pod: simple-prod-collector-c7b5b464f-xt6b7 replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-c7b5b464f to 2 from 1 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-query-7797d6578f-hbbcr to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7797d6578f SuccessfulCreate Created pod: simple-prod-query-7797d6578f-hbbcr replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:52:59 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7797d6578f to 2 from 1 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:04 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" in 4.356s (4.356s including waiting) kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:04 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:04 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:04 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:08 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" in 3.748s (3.748s including waiting) kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:08 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:08 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:08 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:10 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" in 2.277s (2.277s including waiting) kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:10 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:10 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Normal Pod simple-prod-query-7797d6578f-hbbcr.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Normal Pod simple-prod-query-7797d6578f-n6z5f.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7797d6578f SuccessfulDelete Deleted pod: simple-prod-query-7797d6578f-n6z5f replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7797d6578f SuccessfulDelete Deleted pod: simple-prod-query-7797d6578f-hbbcr replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-7797d6578f to 0 from 2 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:14 +0000 UTC Warning Endpoints simple-prod-query FailedToUpdateEndpoint Failed to update endpoint kuttl-test-vital-tortoise/simple-prod-query: Operation cannot be fulfilled on endpoints "simple-prod-query": the object has been modified; please apply your changes to the latest version and try again endpoint-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:15 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-query-56bf7dd4d5-ls99c to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:15 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2 Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-query-56bf7dd4d5-rf6v2 to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:15 +0000 UTC Normal ReplicaSet.apps simple-prod-query-56bf7dd4d5 SuccessfulCreate Created pod: simple-prod-query-56bf7dd4d5-rf6v2 replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:15 +0000 UTC Normal ReplicaSet.apps simple-prod-query-56bf7dd4d5 SuccessfulCreate Created pod: simple-prod-query-56bf7dd4d5-ls99c replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:15 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-56bf7dd4d5 to 2 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2 AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:16 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:20 +0000 UTC Normal Pod check-span-5wvhv Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/check-span-5wvhv to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:20 +0000 UTC Normal Pod check-span-5wvhv AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:20 +0000 UTC Normal Pod check-span-5wvhv.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:20 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-5wvhv job-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:20 +0000 UTC Normal Pod report-span-pndgz Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/report-span-pndgz to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:20 +0000 UTC Normal Pod report-span-pndgz AddedInterface Add eth0 [10.128.2.32/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:20 +0000 UTC Normal Pod report-span-pndgz.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:20 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-pndgz job-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:21 +0000 UTC Normal Pod check-span-5wvhv.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:21 +0000 UTC Normal Pod check-span-5wvhv.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:22 +0000 UTC Normal Pod report-span-pndgz.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" in 1.686s (1.686s including waiting) kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:22 +0000 UTC Normal Pod report-span-pndgz.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:22 +0000 UTC Normal Pod report-span-pndgz.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:31 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-m842z.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Pod simple-prod-collector-c7b5b464f-xt6b7.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-c7b5b464f SuccessfulDelete Deleted pod: simple-prod-collector-c7b5b464f-xt6b7 replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-c7b5b464f SuccessfulDelete Deleted pod: simple-prod-collector-c7b5b464f-m842z replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-c7b5b464f to 0 from 2 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-ls99c.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Pod simple-prod-query-56bf7dd4d5-rf6v2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal ReplicaSet.apps simple-prod-query-56bf7dd4d5 SuccessfulDelete Deleted pod: simple-prod-query-56bf7dd4d5-ls99c replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal ReplicaSet.apps simple-prod-query-56bf7dd4d5 SuccessfulDelete Deleted pod: simple-prod-query-56bf7dd4d5-rf6v2 replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:33 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-56bf7dd4d5 to 0 from 2 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986f7c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7 replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7 Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7 to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7 AddedInterface Add eth0 [10.131.0.49/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986nzp7.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:34 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestvitaltortoisesimpleprod-2-756986f7c to 1 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-vwfrx Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-collector-659d467586-vwfrx to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-vwfrx AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-vwfrx.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-vwfrx.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-vwfrx.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-xk67j Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-collector-659d467586-xk67j to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-xk67j AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-xk67j.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-xk67j.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-collector-659d467586-xk67j.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-659d467586 SuccessfulCreate Created pod: simple-prod-collector-659d467586-xk67j replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-659d467586 SuccessfulCreate Created pod: simple-prod-collector-659d467586-vwfrx replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-659d467586 to 2 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-query-5747fcf9c6-dj5fv to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-dj5fv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-k78kn Binding Scheduled Successfully assigned kuttl-test-vital-tortoise/simple-prod-query-5747fcf9c6-k78kn to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-k78kn AddedInterface Add eth0 [10.129.2.17/23] from ovn-kubernetes
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Pod simple-prod-query-5747fcf9c6-k78kn.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" kubelet
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5747fcf9c6 SuccessfulCreate Created pod: simple-prod-query-5747fcf9c6-dj5fv replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5747fcf9c6 SuccessfulCreate Created pod: simple-prod-query-5747fcf9c6-k78kn replicaset-controller
logger.go:42: 06:53:37 | es-increasing-replicas | 2023-12-18 06:53:35 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-5747fcf9c6 to 2 deployment-controller
logger.go:42: 06:53:37 | es-increasing-replicas | Deleting namespace: kuttl-test-vital-tortoise
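The es-index-cleaner test that follows waits for the operator-created CronJob to produce a successful Job (step 4-wait-es-index-cleaner, via cmd-utils/wait-cronjob/main.go, whose debug output is visible below). A kubectl-only approximation of that polling loop, assuming Jobs spawned by a CronJob are prefixed with its name, as the scheduled jobs earlier in this log are (e.g. my-jaeger-es-lookback-28381371); this sketch is not the helper's actual implementation:

#!/usr/bin/env bash
# Poll until a Job owned by the given CronJob reports at least one success.
set -euo pipefail
CRONJOB=${1:?cronjob name}
NAMESPACE=${2:?namespace}

until kubectl get jobs -n "$NAMESPACE" \
    -o jsonpath='{range .items[*]}{.metadata.name}{" "}{.status.succeeded}{"\n"}{end}' \
  | awk -v cj="$CRONJOB" 'index($1, cj"-") == 1 && $2 >= 1 {found=1} END {exit !found}'
do
  echo "Waiting for next job from $CRONJOB to succeed"
  sleep 10
done
echo "Job of $CRONJOB succeeded"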
=== CONT  kuttl/harness/es-index-cleaner-autoprov
logger.go:42: 06:54:13 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:54:13 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-frank-sponge
logger.go:42: 06:54:13 | es-index-cleaner-autoprov/1-install | starting test step 1-install
logger.go:42: 06:54:13 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-frank-sponge/test-es-index-cleaner-with-prefix created
logger.go:42: 06:54:51 | es-index-cleaner-autoprov/1-install | test step completed 1-install
logger.go:42: 06:54:51 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 06:54:51 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE test-es-index-cleaner-with-prefix /dev/null]
logger.go:42: 06:54:52 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 06:54:58 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 06:54:59 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 06:54:59 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 06:55:38 | es-index-cleaner-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 06:55:38 | es-index-cleaner-autoprov/3-install | starting test step 3-install
logger.go:42: 06:55:39 | es-index-cleaner-autoprov/3-install | Jaeger:kuttl-test-frank-sponge/test-es-index-cleaner-with-prefix updated
logger.go:42: 06:55:39 | es-index-cleaner-autoprov/3-install | test step completed 3-install
logger.go:42: 06:55:39 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | starting test step 4-wait-es-index-cleaner
logger.go:42: 06:55:39 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob test-es-index-cleaner-with-prefix-es-index-cleaner --namespace $NAMESPACE]
logger.go:42: 06:55:40 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-18T06:55:40Z" level=debug msg="Checking if the test-es-index-cleaner-with-prefix-es-index-cleaner CronJob exists"
logger.go:42: 06:55:40 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-18T06:55:40Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 06:55:40 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-18T06:55:40Z" level=info msg="Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner found successfully"
logger.go:42: 06:55:40 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-18T06:55:40Z" level=debug msg="Waiting for the next scheduled job from test-es-index-cleaner-with-prefix-es-index-cleaner cronjob"
logger.go:42: 06:55:40 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-18T06:55:40Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 06:55:40 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-18T06:55:40Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 06:55:50 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-18T06:55:50Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 06:56:00 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-18T06:56:00Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 06:56:10 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-18T06:56:10Z" level=info msg="Job of owner test-es-index-cleaner-with-prefix-es-index-cleaner succeeded after test-es-index-cleaner-with-prefix-es-index-cleaner 30.029545022s"
logger.go:42: 06:56:10 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | test step completed 4-wait-es-index-cleaner
logger.go:42: 06:56:10 | es-index-cleaner-autoprov/5-install | starting test step 5-install
logger.go:42: 06:56:10 | es-index-cleaner-autoprov/5-install | Jaeger:kuttl-test-frank-sponge/test-es-index-cleaner-with-prefix updated
logger.go:42: 06:56:10 | es-index-cleaner-autoprov/5-install | test step completed 5-install
logger.go:42: 06:56:10 | es-index-cleaner-autoprov/6-check-indices | starting test step 6-check-indices
logger.go:42: 06:56:10 | es-index-cleaner-autoprov/6-check-indices | Job:kuttl-test-frank-sponge/00-check-indices created
logger.go:42: 06:56:14 | es-index-cleaner-autoprov/6-check-indices | test step completed 6-check-indices
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-frank-sponge:
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-b98457999 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx replicaset-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx Binding Scheduled Successfully assigned kuttl-test-frank-sponge/elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx AddedInterface Add eth0 [10.129.2.18/23] from ovn-kubernetes
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:20 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-b98457999 to 1 deployment-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:30 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:35 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestfrankspongetestesindexclean-1-bgmdqx.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:46 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt Binding Scheduled Successfully assigned kuttl-test-frank-sponge/test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:46 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-5685bf877d SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt replicaset-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:46 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-5685bf877d to 1 deployment-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:46 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-69bcb7d5b to 1 deployment-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:47 +0000 UTC Warning Pod test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt ErrorUpdatingResource addLogicalPort failed for kuttl-test-frank-sponge/test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt: failed to update pod kuttl-test-frank-sponge/test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt: Operation cannot be fulfilled on pods "test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt": the object has been modified; please apply your changes to the latest version and try again controlplane
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt AddedInterface Add eth0 [10.131.0.52/23] from ovn-kubernetes
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:47 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj Binding Scheduled Successfully assigned kuttl-test-frank-sponge/test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:47 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-69bcb7d5b SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj replicaset-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:48 +0000 UTC Warning Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj FailedMount MountVolume.SetUp failed for volume "test-es-index-cleaner-with-prefix-ui-oauth-proxy-tls" : failed to sync secret cache: timed out waiting for the condition kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:48 +0000 UTC Warning Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj FailedMount MountVolume.SetUp failed for volume "test-es-index-cleaner-with-prefix-service-ca" : failed to sync configmap cache: timed out waiting for the condition kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:48 +0000 UTC Warning Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj FailedMount MountVolume.SetUp failed for volume "test-es-index-cleaner-with-prefix-trusted-ca" : failed to sync configmap cache: timed out waiting for the condition kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:48 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:48 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:49 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:49 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:49 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:49 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:49 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:49 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:49 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:49 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:53 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:53 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-69bcb7d5b SuccessfulDelete Deleted pod: test-es-index-cleaner-with-prefix-query-69bcb7d5b-lqwbj replicaset-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:53 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled down replica set test-es-index-cleaner-with-prefix-query-69bcb7d5b to 0 from 1 deployment-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg Binding Scheduled Successfully assigned kuttl-test-frank-sponge/test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg AddedInterface Add eth0 [10.128.2.36/23] from ovn-kubernetes
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-5f4bdc9755 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg replicaset-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:54 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-5f4bdc9755 to 1 deployment-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:55 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:55 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:55 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-5f4bdc9755-cfdjg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:59 +0000 UTC Normal Pod 00-report-span-cc595 Binding Scheduled Successfully assigned kuttl-test-frank-sponge/00-report-span-cc595 to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:54:59 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-cc595 job-controller
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:00 +0000 UTC Normal Pod 00-report-span-cc595 AddedInterface Add eth0 [10.131.0.53/23] from ovn-kubernetes
logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:00 +0000 UTC Normal Pod 00-report-span-cc595.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 06:56:14
| es-index-cleaner-autoprov | 2023-12-18 06:55:00 +0000 UTC Normal Pod 00-report-span-cc595.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:00 +0000 UTC Normal Pod 00-report-span-cc595.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:02 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:38 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-5685bf877d-6c4wt horizontal-pod-autoscaler logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:55:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28381376 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-2838137lr898 job-controller logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2838137lr898 Binding Scheduled Successfully assigned kuttl-test-frank-sponge/test-es-index-cleaner-with-prefix-es-index-cleaner-2838137lr898 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2838137lr898 AddedInterface Add 
eth0 [10.131.0.54/23] from ovn-kubernetes logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2838137lr898.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:e5bea9a64ae418869cfb556d70e0a586c21589aed8606b4ff5850780ff5bbbd6" kubelet logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-28381376 cronjob-controller logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:03 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2838137lr898.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:e5bea9a64ae418869cfb556d70e0a586c21589aed8606b4ff5850780ff5bbbd6" in 3.248s (3.248s including waiting) kubelet logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:03 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2838137lr898.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:03 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-2838137lr898.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:06 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28381376 Completed Job completed job-controller logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:06 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-28381376, status: Complete cronjob-controller logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:10 +0000 UTC Normal Pod 00-check-indices-d4xjm Binding Scheduled Successfully assigned kuttl-test-frank-sponge/00-check-indices-d4xjm to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:10 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-d4xjm job-controller logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:11 +0000 UTC Normal Pod 00-check-indices-d4xjm AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:11 +0000 UTC Normal Pod 00-check-indices-d4xjm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:11 +0000 UTC Normal Pod 00-check-indices-d4xjm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:11 +0000 UTC Normal Pod 00-check-indices-d4xjm.spec.containers{asserts-container} Started 
Started container asserts-container kubelet logger.go:42: 06:56:14 | es-index-cleaner-autoprov | 2023-12-18 06:56:14 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 06:56:14 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-frank-sponge === CONT kuttl/harness/es-from-aio-to-production logger.go:42: 06:56:22 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 06:56:22 | es-from-aio-to-production | Creating namespace: kuttl-test-adequate-bedbug logger.go:42: 06:56:22 | es-from-aio-to-production/0-install | starting test step 0-install logger.go:42: 06:56:22 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-adequate-bedbug/my-jaeger created logger.go:42: 06:56:30 | es-from-aio-to-production/0-install | test step completed 0-install logger.go:42: 06:56:30 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test logger.go:42: 06:56:30 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 06:56:31 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 06:56:38 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 06:56:38 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 06:56:38 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created logger.go:42: 06:56:38 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created logger.go:42: 06:56:50 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test logger.go:42: 06:56:50 | es-from-aio-to-production/3-install | starting test step 3-install logger.go:42: 06:56:50 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-adequate-bedbug/my-jaeger updated logger.go:42: 06:57:24 | es-from-aio-to-production/3-install | test step completed 3-install logger.go:42: 06:57:24 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test logger.go:42: 06:57:24 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 06:57:32 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o 
smoke-test-job.yaml] logger.go:42: 06:57:32 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 06:57:32 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged logger.go:42: 06:57:32 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged logger.go:42: 06:57:33 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test logger.go:42: 06:57:33 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-adequate-bedbug: logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:25 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn Binding Scheduled Successfully assigned kuttl-test-adequate-bedbug/my-jaeger-765fd6b7d5-b2xsn to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-765fd6b7d5 SuccessfulCreate Created pod: my-jaeger-765fd6b7d5-b2xsn replicaset-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:25 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-765fd6b7d5 to 1 deployment-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:26 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn AddedInterface Add eth0 [10.129.2.19/23] from ovn-kubernetes logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:26 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:29 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" in 3.314s (3.314s including waiting) kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:29 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:29 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:29 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:29 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:29 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:34 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:34 +0000 UTC Normal Pod my-jaeger-765fd6b7d5-b2xsn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy 
kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:34 +0000 UTC Normal ReplicaSet.apps my-jaeger-765fd6b7d5 SuccessfulDelete Deleted pod: my-jaeger-765fd6b7d5-b2xsn replicaset-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:34 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-765fd6b7d5 to 0 from 1 deployment-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:35 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk Binding Scheduled Successfully assigned kuttl-test-adequate-bedbug/my-jaeger-86d87857bd-hwqrk to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:35 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk AddedInterface Add eth0 [10.129.2.20/23] from ovn-kubernetes logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:35 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:35 +0000 UTC Normal ReplicaSet.apps my-jaeger-86d87857bd SuccessfulCreate Created pod: my-jaeger-86d87857bd-hwqrk replicaset-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:35 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-86d87857bd to 1 deployment-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:36 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:36 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:36 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:36 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:36 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:38 +0000 UTC Normal Pod check-span-q5jtx Binding Scheduled Successfully assigned kuttl-test-adequate-bedbug/check-span-q5jtx to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:38 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-q5jtx job-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:38 +0000 UTC Normal Pod report-span-hhwcd Binding Scheduled Successfully assigned kuttl-test-adequate-bedbug/report-span-hhwcd to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:38 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-hhwcd job-controller logger.go:42: 
06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:39 +0000 UTC Normal Pod check-span-q5jtx AddedInterface Add eth0 [10.128.2.37/23] from ovn-kubernetes logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:39 +0000 UTC Normal Pod check-span-q5jtx.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:39 +0000 UTC Normal Pod check-span-q5jtx.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:39 +0000 UTC Normal Pod check-span-q5jtx.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:39 +0000 UTC Normal Pod report-span-hhwcd AddedInterface Add eth0 [10.131.0.56/23] from ovn-kubernetes logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:39 +0000 UTC Normal Pod report-span-hhwcd.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:39 +0000 UTC Normal Pod report-span-hhwcd.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:39 +0000 UTC Normal Pod report-span-hhwcd.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:49 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:54 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566ffb5c9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l replicaset-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l Binding Scheduled Successfully assigned kuttl-test-adequate-bedbug/elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l AddedInterface Add eth0 [10.129.2.21/23] from ovn-kubernetes logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:54 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:54 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566ffb5c9 to 1 deployment-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:55 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:56:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:10 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestadequatebedbugmyjaeger-1-5566fflm68l.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:12 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-86d87857bd-hwqrk.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-collector-6b49bd55ff-zghtk Binding Scheduled Successfully assigned kuttl-test-adequate-bedbug/my-jaeger-collector-6b49bd55ff-zghtk to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-collector-6b49bd55ff-zghtk AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-collector-6b49bd55ff-zghtk.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-collector-6b49bd55ff-zghtk.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-collector-6b49bd55ff-zghtk.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-6b49bd55ff SuccessfulCreate Created pod: my-jaeger-collector-6b49bd55ff-zghtk replicaset-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Deployment.apps 
my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-6b49bd55ff to 1 deployment-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf Binding Scheduled Successfully assigned kuttl-test-adequate-bedbug/my-jaeger-query-7cb4bd84c6-mb4mf to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf AddedInterface Add eth0 [10.131.0.57/23] from ovn-kubernetes logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Pod my-jaeger-query-7cb4bd84c6-mb4mf.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7cb4bd84c6 SuccessfulCreate Created pod: my-jaeger-query-7cb4bd84c6-mb4mf replicaset-controller logger.go:42: 06:57:33 | es-from-aio-to-production | 2023-12-18 06:57:21 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7cb4bd84c6 to 1 deployment-controller logger.go:42: 06:57:33 | es-from-aio-to-production | Deleting namespace: kuttl-test-adequate-bedbug === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (691.79s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts 
(6.04s) --- PASS: kuttl/harness/es-multiinstance (114.77s) --- PASS: kuttl/harness/es-simple-prod (6.05s) --- PASS: kuttl/harness/es-rollover-autoprov (247.03s) --- PASS: kuttl/harness/es-increasing-replicas (111.51s) --- PASS: kuttl/harness/es-index-cleaner-autoprov (128.62s) --- PASS: kuttl/harness/es-from-aio-to-production (77.72s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml time="2023-12-18T06:57:41Z" level=debug msg="Setting a new name for the test suites" time="2023-12-18T06:57:41Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-12-18T06:57:41Z" level=debug msg="normalizing test case names" time="2023-12-18T06:57:41Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts" time="2023-12-18T06:57:41Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance" time="2023-12-18T06:57:41Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod" time="2023-12-18T06:57:41Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov" time="2023-12-18T06:57:41Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas" time="2023-12-18T06:57:41Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov" time="2023-12-18T06:57:41Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+-----------------------------------------+--------+
|                  NAME                   | RESULT |
+-----------------------------------------+--------+
| elasticsearch_artifacts                 | passed |
| elasticsearch_es_multiinstance          | passed |
| elasticsearch_es_simple_prod            | passed |
| elasticsearch_es_rollover_autoprov      | passed |
| elasticsearch_es_increasing_replicas    | passed |
| elasticsearch_es_index_cleaner_autoprov | passed |
| elasticsearch_es_from_aio_to_production | passed |
+-----------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true + '[' 3 -ne 3 ']' + test_suite_name=examples + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/examples.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-examples make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=3.6.0 \ SKIP_KAFKA=false \ VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \ ./tests/e2e/examples/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 19m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 19m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
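
The KAFKA_USE_CUSTOM_PODSET decision traced above hinges on a small version_le helper: the two version strings are split onto separate lines, sorted with sort -V, and the comparison holds when the first argument sorts lowest. A minimal sketch reconstructed from the set -x output (only the pipeline and the final test are visible in the trace, so the exact function body is an assumption):

    version_le() {
        # True (exit 0) when $1 <= $2 in version order: sort -V puts the
        # smaller of the two versions first, so $1 <= $2 iff $1 sorts to the top.
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }

In the run above, version_le 3.6.0 0.25.0 sorts 0.25.0 first, the test 0.25.0 == 3.6.0 fails, and the script falls through to KAFKA_USE_CUSTOM_PODSET=true: Kafka 3.6.0 is newer than the 0.25.0 cutoff, so the custom pod set path is taken.
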
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + '[' true '!=' true ']' + render_install_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./02-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. 
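
Both lookups traced above are plain yq queries against the rendered install file. get_jaeger_name reads .metadata.name from the Jaeger document; get_jaeger_strategy prefers .spec.strategy when it is production or streaming, otherwise consults .spec.agent.strategy and falls back to allInOne when that is null. A rough reconstruction from the trace (hedged: the real helpers in the jaeger-tests repo may differ in detail, and yq here stands in for the vendored /tmp/jaeger-tests/bin/yq):

    get_jaeger_name() {
        yq e '. | select(.kind == "Jaeger").metadata.name' "$1"
    }

    get_jaeger_strategy() {
        local deployment_file=$1
        local strategy
        strategy=$(yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
        if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
            echo "$strategy"
            return 0
        fi
        # Fall back to the agent strategy; yq prints "null" for a missing key.
        strategy=$(yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
        if [ "$strategy" = null ]; then
            echo allInOne
        else
            echo "$strategy"
        fi
    }

For agent-with-priority-class this resolves to DaemonSet; note in the trace that the caller renders the same allinone-jaeger-assert.yaml.template for both the DaemonSet and allInOne outcomes.
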
+ mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. 
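
render_smoke_test, traced earlier for my-jaeger, derives the collector and query endpoints from the Jaeger instance name and hands them to gomplate through the environment. A sketch under the assumptions visible in this log (only the secured branch appears here, so the insecure defaults below, http:// and port 16686, are guesses):

    render_smoke_test() {
        local jaeger=$1 is_secured=$2 test_step=$3
        local protocol=http:// query_port=:16686    # assumed defaults; not shown in this trace
        local template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
        if [ "$is_secured" = true ]; then
            # On OpenShift the query service sits behind oauth-proxy on 443.
            protocol=https://
            query_port=:443
            template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
        fi
        export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
        export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
        export JAEGER_NAME=$jaeger
        gomplate -f "$template" -o ./${test_step}-smoke-test.yaml
        gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template \
            -o ./${test_step}-assert.yaml
        unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }

The extra sed above for all-in-one-with-options rewrites my-jaeger-query:443 to my-jaeger-query:443/jaeger, since that example serves the query API under a custom base path.
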
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
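
The smoke-test rendering repeated above for every example boils down to one small helper: on a secured (OpenShift) cluster it picks the oauth-protected template and the https route port, exports the per-instance endpoints so gomplate can substitute them into the template, renders the test step plus its assert file, and cleans the environment again. A minimal sketch reconstructed from this trace; the unsecured branch never runs in this job, so its protocol, port, and template path are assumptions:

    render_smoke_test() {
        jaeger=$1; is_secured=$2; test_step=$3
        if [ "$is_secured" = true ]; then
            protocol=https://    # query traffic goes through the OpenShift route
            query_port=:443
            template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
        else
            protocol=http://     # assumed plain-Kubernetes defaults, not exercised here
            query_port=:16686
            template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
        fi
        export JAEGER_NAME=$jaeger
        export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
        export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
        /tmp/jaeger-tests/bin/gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
        /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o "./${test_step}-assert.yaml"
        unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }
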
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
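
Worth noting in the simple-prod render above: because this is an OpenShift run, the two yq edits strip the example's external-Elasticsearch options (.spec.storage.options={}) and replace them with an operator-provisioned single-node cluster capped at 2Gi of memory so it fits on a CI worker. The resulting storage block can be inspected with the same yq binary; the nodeCount and resources values below are taken straight from the trace, while the type field is assumed to come from the upstream simple-prod example:

    /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.storage' ./01-install.yaml
    # type: elasticsearch     <- assumed, inherited from examples/simple-prod.yaml
    # options: {}
    # elasticsearch:
    #   nodeCount: 1
    #   resources:
    #     limits:
    #       memory: 2Gi
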
+ mkdir -p examples-simple-prod-with-volumes + cd examples-simple-prod-with-volumes + example_name=simple-prod-with-volumes + render_install_example simple-prod-with-volumes 01 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod-with-volumes 02 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml + start_test examples-simplest + '[' 1 -ne 1 ']' + test_name=examples-simplest + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simplest' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simplest\e[0m' Rendering files for test examples-simplest + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes + '[' examples-simple-prod-with-volumes '!=' _build ']' + cd .. + mkdir -p examples-simplest + cd examples-simplest + example_name=simplest + render_install_example simplest 00 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 01 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger + '[' 1 -ne 1 ']' + test_name=examples-with-badger + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m' Rendering files for test examples-with-badger + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest + '[' examples-simplest '!=' _build ']' + cd .. + mkdir -p examples-with-badger + cd examples-with-badger + example_name=with-badger + render_install_example with-badger 00 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + JAEGER_NAME=with-badger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger 01 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + jaeger_name=with-badger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + export JAEGER_NAME=with-badger + JAEGER_NAME=with-badger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger-and-volume + '[' 1 -ne 1 ']' + test_name=examples-with-badger-and-volume + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger-and-volume' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m' Rendering files for test examples-with-badger-and-volume + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger + '[' examples-with-badger '!=' _build ']' + cd .. + mkdir -p examples-with-badger-and-volume + cd examples-with-badger-and-volume + example_name=with-badger-and-volume + render_install_example with-badger-and-volume 00 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + JAEGER_NAME=with-badger-and-volume + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger-and-volume 01 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + jaeger_name=with-badger-and-volume + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger-and-volume true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger-and-volume + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + export JAEGER_NAME=with-badger-and-volume + JAEGER_NAME=with-badger-and-volume + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-cassandra + '[' 1 -ne 1 ']' + test_name=examples-with-cassandra + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-cassandra' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m' Rendering files for test examples-with-cassandra + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume + '[' examples-with-badger-and-volume '!=' _build ']' + cd .. 
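
Every render above runs the same strategy probe to decide which assert template applies: the top-level .spec.strategy wins when it is production or streaming, otherwise the agent strategy is consulted, and a null result falls through to allInOne. A sketch of that logic as it appears in the trace; the DaemonSet branch is inferred from the caller's '[' allInOne = DaemonSet ']' comparison rather than observed directly:

    get_jaeger_strategy() {
        deployment_file=$1
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
        if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
            echo "$strategy"
            return 0
        fi
        # no top-level strategy: fall back to the agent strategy
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
        if [ "$strategy" = null ]; then
            echo allInOne     # nothing set anywhere -> all-in-one assert template
        else
            echo DaemonSet    # assumed mapping for an explicit agent strategy
        fi
        return 0
    }
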
+ mkdir -p examples-with-cassandra + cd examples-with-cassandra + example_name=with-cassandra + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-cassandra 01 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + JAEGER_NAME=with-cassandra + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-cassandra 02 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + jaeger_name=with-cassandra + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-cassandra true 02 + '[' 3 -ne 3 ']' + jaeger=with-cassandra + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + export JAEGER_NAME=with-cassandra + JAEGER_NAME=with-cassandra + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-sampling + '[' 1 -ne 1 ']' + test_name=examples-with-sampling + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-sampling' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m' Rendering files for test examples-with-sampling + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra + '[' examples-with-cassandra '!=' _build ']' + cd .. + mkdir -p examples-with-sampling + cd examples-with-sampling + export example_name=with-sampling + example_name=with-sampling + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-sampling 01 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + JAEGER_NAME=with-sampling + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-sampling 02 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + jaeger_name=with-sampling + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-sampling true 02 + '[' 3 -ne 3 ']' + jaeger=with-sampling + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + export JAEGER_NAME=with-sampling + JAEGER_NAME=with-sampling + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling + '[' examples-with-sampling '!=' _build ']' + cd .. 
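
Each start_test/render_* sequence above leaves behind one numbered kuttl case under _build; kuttl later executes the steps in order and treats every NN-assert.yaml as the state the cluster must converge to before the step passes. For the test just rendered, the layout reconstructed from the commands above (descriptions added here) is:

    ls tests/e2e/examples/_build/examples-with-sampling
    # 00-install.yaml     Cassandra Service + StatefulSet (cassandra-install template)
    # 00-assert.yaml      expected Cassandra rollout (cassandra-assert template)
    # 01-install.yaml     Jaeger CR rendered from examples/with-sampling.yaml
    # 01-assert.yaml      expected allInOne Deployment
    # 02-smoke-test.yaml  report-span / check-span smoke-test Jobs
    # 02-assert.yaml      expected completion of both Jobs
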
+ mkdir -p examples-agent-as-daemonset
+ cd examples-agent-as-daemonset
+ '[' true = true ']'
+ prepare_daemonset 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ '[' true = true ']'
+ cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml
+ echo ---
+ cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml
+ '[' true = true ']'
+ start_test examples-openshift-with-htpasswd
+ '[' 1 -ne 1 ']'
+ test_name=examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-openshift-with-htpasswd'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m'
Rendering files for test examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset
+ '[' examples-agent-as-daemonset '!=' _build ']'
+ cd ..
+ mkdir -p examples-openshift-with-htpasswd
+ cd examples-openshift-with-htpasswd
+ export JAEGER_NAME=with-htpasswd
+ JAEGER_NAME=with-htpasswd
+ export JAEGER_USERNAME=awesomeuser
+ JAEGER_USERNAME=awesomeuser
+ export JAEGER_PASSWORD=awesomepassword
+ JAEGER_PASSWORD=awesomepassword
+ export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
+ JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ base64
+ SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg==
+ /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ INSECURE=true
+ JAEGER_USERNAME=
+ JAEGER_PASSWORD=
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml
+ JAEGER_USERNAME=wronguser
+ JAEGER_PASSWORD=wrongpassword
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running examples E2E tests'
Running examples E2E tests
+ cd tests/e2e/examples/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3803946269
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 600 seconds for each step
harness.go:372: testsuite: . has 15 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN   kuttl/harness/examples-agent-as-daemonset
=== PAUSE kuttl/harness/examples-agent-as-daemonset
=== RUN   kuttl/harness/examples-agent-with-priority-class
=== PAUSE kuttl/harness/examples-agent-with-priority-class
=== RUN   kuttl/harness/examples-all-in-one-with-options
=== PAUSE kuttl/harness/examples-all-in-one-with-options
=== RUN   kuttl/harness/examples-business-application-injected-sidecar
=== PAUSE kuttl/harness/examples-business-application-injected-sidecar
=== RUN   kuttl/harness/examples-collector-with-priority-class
=== PAUSE kuttl/harness/examples-collector-with-priority-class
=== RUN   kuttl/harness/examples-openshift-with-htpasswd
=== PAUSE kuttl/harness/examples-openshift-with-htpasswd
=== RUN   kuttl/harness/examples-service-types
=== PAUSE kuttl/harness/examples-service-types
=== RUN   kuttl/harness/examples-simple-prod
=== PAUSE kuttl/harness/examples-simple-prod
=== RUN   kuttl/harness/examples-simple-prod-with-volumes
=== PAUSE kuttl/harness/examples-simple-prod-with-volumes
=== RUN   kuttl/harness/examples-simplest
=== PAUSE kuttl/harness/examples-simplest
=== RUN   kuttl/harness/examples-with-badger
=== PAUSE kuttl/harness/examples-with-badger
=== RUN   kuttl/harness/examples-with-badger-and-volume
=== PAUSE kuttl/harness/examples-with-badger-and-volume
=== RUN   kuttl/harness/examples-with-cassandra
=== PAUSE kuttl/harness/examples-with-cassandra
=== RUN   kuttl/harness/examples-with-sampling
=== PAUSE kuttl/harness/examples-with-sampling
=== CONT  kuttl/harness/artifacts
logger.go:42: 06:58:12 | artifacts | Creating namespace: kuttl-test-notable-starling
logger.go:42: 06:58:12 | artifacts | artifacts events from ns kuttl-test-notable-starling:
logger.go:42: 06:58:12 | artifacts | Deleting namespace: kuttl-test-notable-starling
=== CONT  kuttl/harness/examples-with-sampling
logger.go:42: 06:58:19 | examples-with-sampling | Creating namespace: kuttl-test-funky-unicorn
logger.go:42: 06:58:19 | examples-with-sampling/0-install | starting test step 0-install
logger.go:42: 06:58:19 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE]
logger.go:42: 06:58:19 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 06:58:19 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-funky-unicorn
logger.go:42: 06:58:19 | examples-with-sampling/0-install | kubectl create namespace kuttl-test-funky-unicorn 2>&1 | grep -v "already exists" || true
logger.go:42: 06:58:19 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-funky-unicorn 2>&1 | grep -v "already exists" || true
logger.go:42: 06:58:19 | examples-with-sampling/0-install | service/cassandra created
logger.go:42: 06:58:19 | examples-with-sampling/0-install | statefulset.apps/cassandra created
logger.go:42: 06:58:19 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
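
The `make cassandra` step logged above uses a create-or-ignore idiom so that re-runs of the step stay green: grep filters out only the "already exists" complaint, and the trailing `|| true` keeps the step alive when grep, having filtered every line, exits non-zero. One caveat: because the pipeline's exit status is grep's, a genuine kubectl failure surfaces as log text rather than as a failed command; it is the following kuttl assert that actually fails the test. The pattern, with kuttl's $NAMESPACE substituted in:

    kubectl create namespace "$NAMESPACE" 2>&1 | grep -v "already exists" || true
    kubectl create -f ./tests/cassandra.yml --namespace "$NAMESPACE" 2>&1 | grep -v "already exists" || true
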
logger.go:42: 06:58:29 | examples-with-sampling/0-install | test step completed 0-install logger.go:42: 06:58:29 | examples-with-sampling/1-install | starting test step 1-install logger.go:42: 06:58:29 | examples-with-sampling/1-install | Jaeger:kuttl-test-funky-unicorn/with-sampling created logger.go:42: 06:58:35 | examples-with-sampling/1-install | test step completed 1-install logger.go:42: 06:58:35 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test logger.go:42: 06:58:35 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null] logger.go:42: 06:58:37 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 06:58:43 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 06:58:43 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 06:58:44 | examples-with-sampling/2-smoke-test | job.batch/report-span created logger.go:42: 06:58:44 | examples-with-sampling/2-smoke-test | job.batch/check-span created logger.go:42: 06:58:56 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test logger.go:42: 06:58:56 | examples-with-sampling/3- | starting test step 3- logger.go:42: 06:58:56 | examples-with-sampling/3- | test step completed 3- logger.go:42: 06:58:56 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-funky-unicorn: logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:19 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-funky-unicorn/cassandra-0 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:19 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:20 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.22/23] from ovn-kubernetes logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:20 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 4.172s (4.172s including waiting) kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} 
Started Started container cassandra kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:24 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-funky-unicorn/cassandra-1 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:24 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.128.2.39/23] from ovn-kubernetes logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:24 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:24 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:28 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 3.974s (3.974s including waiting) kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:28 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:28 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:33 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr Binding Scheduled Successfully assigned kuttl-test-funky-unicorn/with-sampling-f9774f6d4-zwdzr to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:33 +0000 UTC Warning Pod with-sampling-f9774f6d4-zwdzr FailedMount MountVolume.SetUp failed for volume "with-sampling-collector-tls-config-volume" : secret "with-sampling-collector-headless-tls" not found kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:33 +0000 UTC Normal ReplicaSet.apps with-sampling-f9774f6d4 SuccessfulCreate Created pod: with-sampling-f9774f6d4-zwdzr replicaset-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:33 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-f9774f6d4 to 1 deployment-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:34 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr AddedInterface Add eth0 [10.129.2.23/23] from ovn-kubernetes logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:34 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:34 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:34 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:34 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on 
machine kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:34 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:34 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:39 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:39 +0000 UTC Normal Pod with-sampling-f9774f6d4-zwdzr.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:39 +0000 UTC Normal ReplicaSet.apps with-sampling-f9774f6d4 SuccessfulDelete Deleted pod: with-sampling-f9774f6d4-zwdzr replicaset-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:39 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set with-sampling-f9774f6d4 to 0 from 1 deployment-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:40 +0000 UTC Normal Pod with-sampling-fcb87d6c7-pffd2 Binding Scheduled Successfully assigned kuttl-test-funky-unicorn/with-sampling-fcb87d6c7-pffd2 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:40 +0000 UTC Normal Pod with-sampling-fcb87d6c7-pffd2 AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:40 +0000 UTC Normal Pod with-sampling-fcb87d6c7-pffd2.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:40 +0000 UTC Normal ReplicaSet.apps with-sampling-fcb87d6c7 SuccessfulCreate Created pod: with-sampling-fcb87d6c7-pffd2 replicaset-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:40 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-fcb87d6c7 to 1 deployment-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:41 +0000 UTC Normal Pod with-sampling-fcb87d6c7-pffd2.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:41 +0000 UTC Normal Pod with-sampling-fcb87d6c7-pffd2.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:41 +0000 UTC Normal Pod with-sampling-fcb87d6c7-pffd2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:41 +0000 UTC Normal Pod with-sampling-fcb87d6c7-pffd2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:41 +0000 UTC Normal Pod with-sampling-fcb87d6c7-pffd2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:58:56 | examples-with-sampling | 
2023-12-18 06:58:44 +0000 UTC Normal Pod check-span-b98pt Binding Scheduled Successfully assigned kuttl-test-funky-unicorn/check-span-b98pt to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:44 +0000 UTC Normal Pod check-span-b98pt AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:44 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-b98pt job-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:44 +0000 UTC Normal Pod report-span-n4flf Binding Scheduled Successfully assigned kuttl-test-funky-unicorn/report-span-n4flf to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:44 +0000 UTC Normal Pod report-span-n4flf AddedInterface Add eth0 [10.131.0.58/23] from ovn-kubernetes logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:44 +0000 UTC Normal Pod report-span-n4flf.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:44 +0000 UTC Normal Pod report-span-n4flf.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:44 +0000 UTC Normal Pod report-span-n4flf.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:44 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-n4flf job-controller logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:45 +0000 UTC Normal Pod check-span-b98pt.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:45 +0000 UTC Normal Pod check-span-b98pt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:45 +0000 UTC Normal Pod check-span-b98pt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:58:56 | examples-with-sampling | 2023-12-18 06:58:56 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 06:58:56 | examples-with-sampling | Deleting namespace: kuttl-test-funky-unicorn === CONT kuttl/harness/examples-with-cassandra logger.go:42: 06:59:15 | examples-with-cassandra | Creating namespace: kuttl-test-enhanced-squirrel logger.go:42: 06:59:15 | examples-with-cassandra/0-install | starting test step 0-install logger.go:42: 06:59:15 | examples-with-cassandra/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 06:59:15 | examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 06:59:15 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-enhanced-squirrel logger.go:42: 06:59:15 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-enhanced-squirrel 2>&1 | grep -v "already exists" || true logger.go:42: 
06:59:15 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-enhanced-squirrel 2>&1 | grep -v "already exists" || true logger.go:42: 06:59:15 | examples-with-cassandra/0-install | service/cassandra created logger.go:42: 06:59:15 | examples-with-cassandra/0-install | statefulset.apps/cassandra created logger.go:42: 06:59:15 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 06:59:17 | examples-with-cassandra/0-install | test step completed 0-install logger.go:42: 06:59:17 | examples-with-cassandra/1-install | starting test step 1-install logger.go:42: 06:59:17 | examples-with-cassandra/1-install | Jaeger:kuttl-test-enhanced-squirrel/with-cassandra created logger.go:42: 07:00:17 | examples-with-cassandra/1-install | test step completed 1-install logger.go:42: 07:00:17 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:00:17 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null] logger.go:42: 07:00:19 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:00:25 | examples-with-cassandra/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:00:25 | examples-with-cassandra/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:00:26 | examples-with-cassandra/2-smoke-test | job.batch/report-span created logger.go:42: 07:00:26 | examples-with-cassandra/2-smoke-test | job.batch/check-span created logger.go:42: 07:00:38 | examples-with-cassandra/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:00:38 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-enhanced-squirrel: logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:15 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-enhanced-squirrel/cassandra-0 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:15 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:16 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.129.2.25/23] from ovn-kubernetes logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:16 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:16 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container 
cassandra kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:16 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:16 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-enhanced-squirrel/cassandra-1 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:16 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:17 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:17 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:17 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:17 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:21 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-t49zl Binding Scheduled Successfully assigned kuttl-test-enhanced-squirrel/with-cassandra-cassandra-schema-job-t49zl to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:21 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-t49zl AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:21 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-t49zl.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.51.0" kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:21 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-t49zl job-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:26 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-t49zl.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.51.0" in 4.862s (4.862s including waiting) kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:26 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-t49zl.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:26 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-t49zl.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:32 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:33 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr Binding Scheduled Successfully assigned 
kuttl-test-enhanced-squirrel/with-cassandra-6f9c6f9b5f-qdkdr to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:33 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:33 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:33 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:33 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:33 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:33 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:33 +0000 UTC Normal ReplicaSet.apps with-cassandra-6f9c6f9b5f SuccessfulCreate Created pod: with-cassandra-6f9c6f9b5f-qdkdr replicaset-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:33 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-6f9c6f9b5f to 1 deployment-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:34 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 06:59:35 +0000 UTC Warning Pod with-cassandra-6f9c6f9b5f-qdkdr.spec.containers{jaeger} BackOff Back-off restarting failed container jaeger in pod with-cassandra-6f9c6f9b5f-qdkdr_kuttl-test-enhanced-squirrel(70500a82-4ad4-4a7b-879b-b8a2efc2a672) kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:21 +0000 UTC Normal Pod with-cassandra-5b69c8d96-vgkx6 Binding Scheduled Successfully assigned kuttl-test-enhanced-squirrel/with-cassandra-5b69c8d96-vgkx6 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:21 +0000 UTC Normal ReplicaSet.apps with-cassandra-5b69c8d96 SuccessfulCreate Created pod: with-cassandra-5b69c8d96-vgkx6 replicaset-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:21 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:21 +0000 UTC Normal Pod with-cassandra-6f9c6f9b5f-qdkdr.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:21 +0000 UTC Normal ReplicaSet.apps with-cassandra-6f9c6f9b5f SuccessfulDelete Deleted pod: with-cassandra-6f9c6f9b5f-qdkdr 
replicaset-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:21 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-6f9c6f9b5f to 0 from 1 deployment-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:21 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-5b69c8d96 to 1 deployment-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:22 +0000 UTC Normal Pod with-cassandra-5b69c8d96-vgkx6 AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:22 +0000 UTC Normal Pod with-cassandra-5b69c8d96-vgkx6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:22 +0000 UTC Normal Pod with-cassandra-5b69c8d96-vgkx6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:22 +0000 UTC Normal Pod with-cassandra-5b69c8d96-vgkx6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:22 +0000 UTC Normal Pod with-cassandra-5b69c8d96-vgkx6.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:22 +0000 UTC Normal Pod with-cassandra-5b69c8d96-vgkx6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:22 +0000 UTC Normal Pod with-cassandra-5b69c8d96-vgkx6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod check-span-9mwx2 Binding Scheduled Successfully assigned kuttl-test-enhanced-squirrel/check-span-9mwx2 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod check-span-9mwx2 AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod check-span-9mwx2.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod check-span-9mwx2.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod check-span-9mwx2.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-9mwx2 job-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod report-span-g8cx5 Binding Scheduled Successfully assigned 
kuttl-test-enhanced-squirrel/report-span-g8cx5 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod report-span-g8cx5 AddedInterface Add eth0 [10.131.0.60/23] from ovn-kubernetes logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod report-span-g8cx5.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod report-span-g8cx5.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Pod report-span-g8cx5.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:26 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-g8cx5 job-controller logger.go:42: 07:00:38 | examples-with-cassandra | 2023-12-18 07:00:37 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:00:38 | examples-with-cassandra | Deleting namespace: kuttl-test-enhanced-squirrel === CONT kuttl/harness/examples-with-badger-and-volume logger.go:42: 07:00:51 | examples-with-badger-and-volume | Creating namespace: kuttl-test-optimum-calf logger.go:42: 07:00:51 | examples-with-badger-and-volume/0-install | starting test step 0-install logger.go:42: 07:00:51 | examples-with-badger-and-volume/0-install | Jaeger:kuttl-test-optimum-calf/with-badger-and-volume created logger.go:42: 07:00:58 | examples-with-badger-and-volume/0-install | test step completed 0-install logger.go:42: 07:00:58 | examples-with-badger-and-volume/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:00:58 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger-and-volume /dev/null] logger.go:42: 07:00:59 | examples-with-badger-and-volume/1-smoke-test | Warning: resource jaegers/with-badger-and-volume is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
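[editor's note] The smoke-test steps in these examples follow a fixed pattern visible in the log: get-token.sh provisions a service-account token for the query route, gomplate renders smoke-test.yaml.template into two Jobs (report-span and check-span), and kubectl applies them. A minimal sketch of the same flow, using the with-badger-and-volume instance from the step above and assuming the template reads its variables from the environment (ASSERT_IMG is omitted here; in the log it points at the CI pipeline image):

    JAEGER_NAME=with-badger-and-volume
    # variables consumed by the template (values taken from the log above)
    export JAEGER_COLLECTOR_ENDPOINT=http://${JAEGER_NAME}-collector-headless:14268
    export JAEGER_QUERY_ENDPOINT=https://${JAEGER_NAME}-query:443
    export MOUNT_SECRET=e2e-test
    gomplate -f tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
    # report-span pushes spans to the collector; check-span polls the query API
    # until they are visible, so waiting on check-span asserts the whole pipeline
    kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=5m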
logger.go:42: 07:01:05 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:01:06 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:01:06 | examples-with-badger-and-volume/1-smoke-test | job.batch/report-span created logger.go:42: 07:01:06 | examples-with-badger-and-volume/1-smoke-test | job.batch/check-span created logger.go:42: 07:01:18 | examples-with-badger-and-volume/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:01:18 | examples-with-badger-and-volume | examples-with-badger-and-volume events from ns kuttl-test-optimum-calf: logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:54 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg Binding Scheduled Successfully assigned kuttl-test-optimum-calf/with-badger-and-volume-85dc7d4967-dl5qg to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:54 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-85dc7d4967 SuccessfulCreate Created pod: with-badger-and-volume-85dc7d4967-dl5qg replicaset-controller logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:54 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-85dc7d4967 to 1 deployment-controller logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:55 +0000 UTC Warning Pod with-badger-and-volume-85dc7d4967-dl5qg FailedMount MountVolume.SetUp failed for volume "with-badger-and-volume-ui-oauth-proxy-tls" : secret "with-badger-and-volume-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:55 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg AddedInterface Add eth0 [10.129.2.29/23] from ovn-kubernetes logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:55 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:56 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:56 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:56 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 
07:00:56 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:00:56 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:02 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-79bc8d9ff6 SuccessfulCreate Created pod: with-badger-and-volume-79bc8d9ff6-xv946 replicaset-controller logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:02 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:02 +0000 UTC Normal Pod with-badger-and-volume-85dc7d4967-dl5qg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:02 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-85dc7d4967 SuccessfulDelete Deleted pod: with-badger-and-volume-85dc7d4967-dl5qg replicaset-controller logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:02 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled down replica set with-badger-and-volume-85dc7d4967 to 0 from 1 deployment-controller logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:02 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-79bc8d9ff6 to 1 deployment-controller logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:03 +0000 UTC Normal Pod with-badger-and-volume-79bc8d9ff6-xv946 Binding Scheduled Successfully assigned kuttl-test-optimum-calf/with-badger-and-volume-79bc8d9ff6-xv946 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:03 +0000 UTC Normal Pod with-badger-and-volume-79bc8d9ff6-xv946 AddedInterface Add eth0 [10.129.2.30/23] from ovn-kubernetes logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:03 +0000 UTC Normal Pod with-badger-and-volume-79bc8d9ff6-xv946.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:03 +0000 UTC Normal Pod with-badger-and-volume-79bc8d9ff6-xv946.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:03 +0000 UTC Normal Pod with-badger-and-volume-79bc8d9ff6-xv946.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:03 +0000 UTC Normal Pod with-badger-and-volume-79bc8d9ff6-xv946.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:03 +0000 UTC Normal Pod with-badger-and-volume-79bc8d9ff6-xv946.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet 
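[editor's note] The event sequence above (ReplicaSet 85dc7d4967 created, a transient FailedMount for secret "with-badger-and-volume-ui-oauth-proxy-tls", then ReplicaSet 79bc8d9ff6 replacing it) appears to be normal first-start behavior on OpenShift: the service-CA controller creates the oauth-proxy TLS secret asynchronously after the Service is annotated, and a follow-up reconcile by the operator rolls the Deployment onto a new ReplicaSet. A quick way to observe the same rollover out of band (namespace taken from the log; a sketch, not part of the test itself):

    kubectl get rs -n kuttl-test-optimum-calf -w    # old RS scales to 0, new RS to 1
    kubectl get secret with-badger-and-volume-ui-oauth-proxy-tls -n kuttl-test-optimum-calf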
logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:03 +0000 UTC Normal Pod with-badger-and-volume-79bc8d9ff6-xv946.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:06 +0000 UTC Normal Pod check-span-z6h84 Binding Scheduled Successfully assigned kuttl-test-optimum-calf/check-span-z6h84 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:06 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-z6h84 job-controller logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:06 +0000 UTC Normal Pod report-span-9cx4h Binding Scheduled Successfully assigned kuttl-test-optimum-calf/report-span-9cx4h to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:06 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9cx4h job-controller logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:07 +0000 UTC Normal Pod check-span-z6h84 AddedInterface Add eth0 [10.131.0.61/23] from ovn-kubernetes logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:07 +0000 UTC Normal Pod check-span-z6h84.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:07 +0000 UTC Normal Pod check-span-z6h84.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:07 +0000 UTC Normal Pod check-span-z6h84.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:07 +0000 UTC Normal Pod report-span-9cx4h AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:07 +0000 UTC Normal Pod report-span-9cx4h.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:07 +0000 UTC Normal Pod report-span-9cx4h.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:07 +0000 UTC Normal Pod report-span-9cx4h.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:01:18 | examples-with-badger-and-volume | 2023-12-18 07:01:18 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:01:18 | examples-with-badger-and-volume | Deleting namespace: kuttl-test-optimum-calf === CONT kuttl/harness/examples-with-badger logger.go:42: 07:01:26 | examples-with-badger | Creating namespace: kuttl-test-promoted-lioness logger.go:42: 07:01:26 | examples-with-badger/0-install | starting test step 0-install logger.go:42: 07:01:26 | examples-with-badger/0-install | Jaeger:kuttl-test-promoted-lioness/with-badger created logger.go:42: 07:01:33 | examples-with-badger/0-install 
| test step completed 0-install logger.go:42: 07:01:33 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:01:33 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null] logger.go:42: 07:01:35 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:01:41 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:01:42 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:01:42 | examples-with-badger/1-smoke-test | job.batch/report-span created logger.go:42: 07:01:42 | examples-with-badger/1-smoke-test | job.batch/check-span created logger.go:42: 07:01:53 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:01:53 | examples-with-badger | examples-with-badger events from ns kuttl-test-promoted-lioness: logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:30 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w Binding Scheduled Successfully assigned kuttl-test-promoted-lioness/with-badger-775f7d884d-wtj6w to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:30 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:30 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:30 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:30 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:30 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:30 +0000 UTC Normal ReplicaSet.apps with-badger-775f7d884d SuccessfulCreate Created pod: with-badger-775f7d884d-wtj6w replicaset-controller logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:30 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-775f7d884d to 1 
deployment-controller logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:31 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:31 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:37 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:37 +0000 UTC Normal Pod with-badger-775f7d884d-wtj6w.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:37 +0000 UTC Normal ReplicaSet.apps with-badger-775f7d884d SuccessfulDelete Deleted pod: with-badger-775f7d884d-wtj6w replicaset-controller logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:37 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-775f7d884d to 0 from 1 deployment-controller logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal Pod with-badger-69b69c7795-54s9q Binding Scheduled Successfully assigned kuttl-test-promoted-lioness/with-badger-69b69c7795-54s9q to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal Pod with-badger-69b69c7795-54s9q AddedInterface Add eth0 [10.129.2.32/23] from ovn-kubernetes logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal Pod with-badger-69b69c7795-54s9q.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal Pod with-badger-69b69c7795-54s9q.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal Pod with-badger-69b69c7795-54s9q.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal Pod with-badger-69b69c7795-54s9q.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal Pod with-badger-69b69c7795-54s9q.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal Pod with-badger-69b69c7795-54s9q.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal ReplicaSet.apps with-badger-69b69c7795 SuccessfulCreate Created pod: with-badger-69b69c7795-54s9q replicaset-controller logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:38 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-69b69c7795 to 1 deployment-controller logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Pod 
check-span-vhlgs Binding Scheduled Successfully assigned kuttl-test-promoted-lioness/check-span-vhlgs to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Pod check-span-vhlgs AddedInterface Add eth0 [10.131.0.62/23] from ovn-kubernetes logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Pod check-span-vhlgs.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Pod check-span-vhlgs.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Pod check-span-vhlgs.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-vhlgs job-controller logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Pod report-span-jvjm2 Binding Scheduled Successfully assigned kuttl-test-promoted-lioness/report-span-jvjm2 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Pod report-span-jvjm2 AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Pod report-span-jvjm2.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:42 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-jvjm2 job-controller logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:43 +0000 UTC Normal Pod report-span-jvjm2.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:43 +0000 UTC Normal Pod report-span-jvjm2.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:01:53 | examples-with-badger | 2023-12-18 07:01:53 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:01:53 | examples-with-badger | Deleting namespace: kuttl-test-promoted-lioness === CONT kuttl/harness/examples-simplest logger.go:42: 07:02:06 | examples-simplest | Creating namespace: kuttl-test-lenient-ferret logger.go:42: 07:02:06 | examples-simplest/0-install | starting test step 0-install logger.go:42: 07:02:06 | examples-simplest/0-install | Jaeger:kuttl-test-lenient-ferret/simplest created logger.go:42: 07:02:12 | examples-simplest/0-install | test step completed 0-install logger.go:42: 07:02:12 | examples-simplest/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:02:12 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null] logger.go:42: 07:02:14 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation 
which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:02:20 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:02:20 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:02:21 | examples-simplest/1-smoke-test | job.batch/report-span created logger.go:42: 07:02:21 | examples-simplest/1-smoke-test | job.batch/check-span created logger.go:42: 07:02:33 | examples-simplest/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:02:33 | examples-simplest | examples-simplest events from ns kuttl-test-lenient-ferret: logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:10 +0000 UTC Normal Pod simplest-554f479c46-t94t6 Binding Scheduled Successfully assigned kuttl-test-lenient-ferret/simplest-554f479c46-t94t6 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:10 +0000 UTC Warning Pod simplest-554f479c46-t94t6 FailedMount MountVolume.SetUp failed for volume "simplest-collector-tls-config-volume" : secret "simplest-collector-headless-tls" not found kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:10 +0000 UTC Normal ReplicaSet.apps simplest-554f479c46 SuccessfulCreate Created pod: simplest-554f479c46-t94t6 replicaset-controller logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:10 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-554f479c46 to 1 deployment-controller logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:11 +0000 UTC Normal Pod simplest-554f479c46-t94t6 AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:11 +0000 UTC Normal Pod simplest-554f479c46-t94t6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:11 +0000 UTC Normal Pod simplest-554f479c46-t94t6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:11 +0000 UTC Normal Pod simplest-554f479c46-t94t6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:11 +0000 UTC Normal Pod simplest-554f479c46-t94t6.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:11 +0000 UTC Normal Pod simplest-554f479c46-t94t6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:11 +0000 UTC Normal Pod 
simplest-554f479c46-t94t6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:16 +0000 UTC Normal Pod simplest-554f479c46-t94t6.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:16 +0000 UTC Normal Pod simplest-554f479c46-t94t6.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:16 +0000 UTC Normal ReplicaSet.apps simplest-554f479c46 SuccessfulDelete Deleted pod: simplest-554f479c46-t94t6 replicaset-controller logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:16 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-554f479c46 to 0 from 1 deployment-controller logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:17 +0000 UTC Normal Pod simplest-549957679-h282v Binding Scheduled Successfully assigned kuttl-test-lenient-ferret/simplest-549957679-h282v to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:17 +0000 UTC Normal ReplicaSet.apps simplest-549957679 SuccessfulCreate Created pod: simplest-549957679-h282v replicaset-controller logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:17 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-549957679 to 1 deployment-controller logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:18 +0000 UTC Normal Pod simplest-549957679-h282v AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:18 +0000 UTC Normal Pod simplest-549957679-h282v.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:18 +0000 UTC Normal Pod simplest-549957679-h282v.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:18 +0000 UTC Normal Pod simplest-549957679-h282v.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:18 +0000 UTC Normal Pod simplest-549957679-h282v.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:18 +0000 UTC Normal Pod simplest-549957679-h282v.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:18 +0000 UTC Normal Pod simplest-549957679-h282v.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:21 +0000 UTC Normal Pod check-span-qr4l6 Binding Scheduled Successfully assigned kuttl-test-lenient-ferret/check-span-qr4l6 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:21 +0000 UTC Normal Pod check-span-qr4l6 AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:21 +0000 UTC Normal Job.batch check-span SuccessfulCreate 
Created pod: check-span-qr4l6 job-controller logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:21 +0000 UTC Normal Pod report-span-nx6fm Binding Scheduled Successfully assigned kuttl-test-lenient-ferret/report-span-nx6fm to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:21 +0000 UTC Normal Pod report-span-nx6fm AddedInterface Add eth0 [10.131.0.63/23] from ovn-kubernetes logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:21 +0000 UTC Normal Pod report-span-nx6fm.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:21 +0000 UTC Normal Pod report-span-nx6fm.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:21 +0000 UTC Normal Pod report-span-nx6fm.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:21 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-nx6fm job-controller logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:22 +0000 UTC Normal Pod check-span-qr4l6.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:22 +0000 UTC Normal Pod check-span-qr4l6.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:22 +0000 UTC Normal Pod check-span-qr4l6.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:02:33 | examples-simplest | 2023-12-18 07:02:32 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:02:33 | examples-simplest | Deleting namespace: kuttl-test-lenient-ferret === CONT kuttl/harness/examples-simple-prod-with-volumes logger.go:42: 07:02:46 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:02:46 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-allowed-mouse logger.go:42: 07:02:46 | examples-simple-prod-with-volumes/1-install | starting test step 1-install logger.go:42: 07:02:46 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-allowed-mouse/simple-prod created logger.go:42: 07:03:21 | examples-simple-prod-with-volumes/1-install | test step completed 1-install logger.go:42: 07:03:21 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:03:21 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 07:03:23 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. 
The missing annotation will be patched automatically. logger.go:42: 07:03:29 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:03:30 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:03:30 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created logger.go:42: 07:03:30 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created logger.go:42: 07:03:41 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:03:41 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume logger.go:42: 07:03:41 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data] logger.go:42: 07:03:41 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-allowed-mouse: logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:02:51 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestallowedmousesimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c968bd7 to 1 deployment-controller logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:02:52 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c968bd7 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7 replicaset-controller logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:02:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7 Binding Scheduled Successfully assigned kuttl-test-allowed-mouse/elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:02:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7 AddedInterface Add eth0 [10.129.2.35/23] from ovn-kubernetes logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:02:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:02:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 
07:02:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:02:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:02:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:02:52 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:02 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:07 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestallowedmousesimpleprod-1-779c96jgsg7.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:18 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-76bf947f79 to 1 deployment-controller logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:18 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5f69696875 SuccessfulCreate Created pod: simple-prod-query-5f69696875-56pl7 replicaset-controller logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:18 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-5f69696875 to 1 deployment-controller logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-collector-76bf947f79-m4drp Binding Scheduled Successfully assigned kuttl-test-allowed-mouse/simple-prod-collector-76bf947f79-m4drp to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Warning Pod simple-prod-collector-76bf947f79-m4drp FailedMount MountVolume.SetUp failed for volume "simple-prod-collector-tls-config-volume" : secret "simple-prod-collector-headless-tls" not found kubelet logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-76bf947f79 SuccessfulCreate Created pod: simple-prod-collector-76bf947f79-m4drp replicaset-controller logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7 Binding Scheduled Successfully assigned kuttl-test-allowed-mouse/simple-prod-query-5f69696875-56pl7 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7 AddedInterface Add eth0 
[10.128.2.46/23] from ovn-kubernetes
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:19 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:20 +0000 UTC Normal Pod simple-prod-collector-76bf947f79-m4drp AddedInterface Add eth0 [10.131.0.64/23] from ovn-kubernetes
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:20 +0000 UTC Normal Pod simple-prod-collector-76bf947f79-m4drp.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:20 +0000 UTC Normal Pod simple-prod-collector-76bf947f79-m4drp.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:20 +0000 UTC Normal Pod simple-prod-collector-76bf947f79-m4drp.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:24 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:24 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:24 +0000 UTC Normal Pod simple-prod-query-5f69696875-56pl7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:24 +0000 UTC Normal ReplicaSet.apps simple-prod-query-5f69696875 SuccessfulDelete Deleted pod: simple-prod-query-5f69696875-56pl7 replicaset-controller
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:24 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-5f69696875 to 0 from 1 deployment-controller
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:25 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz Binding Scheduled Successfully assigned kuttl-test-allowed-mouse/simple-prod-query-565bd9bd7f-m86cz to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:25 +0000 UTC Normal ReplicaSet.apps simple-prod-query-565bd9bd7f SuccessfulCreate Created pod: simple-prod-query-565bd9bd7f-m86cz replicaset-controller
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:25 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-565bd9bd7f to 1 deployment-controller
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz AddedInterface Add eth0 [10.128.2.47/23] from ovn-kubernetes
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:26 +0000 UTC Normal Pod simple-prod-query-565bd9bd7f-m86cz.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:30 +0000 UTC Normal Pod check-span-zhztx Binding Scheduled Successfully assigned kuttl-test-allowed-mouse/check-span-zhztx to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:30 +0000 UTC Normal Pod check-span-zhztx AddedInterface Add eth0 [10.131.0.66/23] from ovn-kubernetes
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:30 +0000 UTC Normal Pod check-span-zhztx.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:30 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-zhztx job-controller
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:30 +0000 UTC Normal Pod report-span-2kzrj Binding Scheduled Successfully assigned kuttl-test-allowed-mouse/report-span-2kzrj to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:30 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2kzrj job-controller
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:31 +0000 UTC Normal Pod check-span-zhztx.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:31 +0000 UTC Normal Pod check-span-zhztx.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:31 +0000 UTC Normal Pod report-span-2kzrj AddedInterface Add eth0 [10.131.0.65/23] from ovn-kubernetes
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:31 +0000 UTC Normal Pod report-span-2kzrj.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:31 +0000 UTC Normal Pod report-span-2kzrj.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:31 +0000 UTC Normal Pod report-span-2kzrj.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:34 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:34 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:34 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | 2023-12-18 07:03:41 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:03:41 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-allowed-mouse
=== CONT kuttl/harness/examples-simple-prod
logger.go:42: 07:04:18 | examples-simple-prod | Creating namespace: kuttl-test-helping-kodiak
logger.go:42: 07:04:18 | examples-simple-prod/1-install | starting test step 1-install
logger.go:42: 07:04:18 | examples-simple-prod/1-install | Jaeger:kuttl-test-helping-kodiak/simple-prod created
logger.go:42: 07:04:55 | examples-simple-prod/1-install | test step completed 1-install
logger.go:42: 07:04:55 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:04:55 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 07:04:57 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
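Every example test in this suite runs the same smoke-test recipe that starts here: fetch a token for the e2e-test service account, render report-span/check-span Jobs from smoke-test.yaml.template with gomplate, apply them, and wait for check-span to complete. A minimal sketch of that render-and-apply step, using the endpoint values visible in the "running command" entries of this log (the kubectl wait call is illustrative and stands in for the harness's own assertion step):

# Sketch of the smoke-test step; variable values are copied from the log,
# the wait timeout is an illustrative placeholder.
ASSERT_IMG="$ASSERT_IMG" \
JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 \
JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 \
MOUNT_SECRET=e2e-test \
gomplate -f tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"   # creates the report-span and check-span Jobs
kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=300s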
logger.go:42: 07:05:03 | examples-simple-prod/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:05:04 | examples-simple-prod/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:05:04 | examples-simple-prod/2-smoke-test | job.batch/report-span created
logger.go:42: 07:05:04 | examples-simple-prod/2-smoke-test | job.batch/check-span created
logger.go:42: 07:05:16 | examples-simple-prod/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 07:05:16 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-helping-kodiak:
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7bdd49 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc replicaset-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc Binding Scheduled Successfully assigned kuttl-test-helping-kodiak/elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:25 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7bdd49 to 1 deployment-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:35 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:40 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesthelpingkodiaksimpleprod-1-5c9b7g4hwc.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-collector-7d9fd6f968-vxb5m Binding Scheduled Successfully assigned kuttl-test-helping-kodiak/simple-prod-collector-7d9fd6f968-vxb5m to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-collector-7d9fd6f968-vxb5m AddedInterface Add eth0 [10.128.2.48/23] from ovn-kubernetes
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-collector-7d9fd6f968-vxb5m.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-collector-7d9fd6f968-vxb5m.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-collector-7d9fd6f968-vxb5m.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-7d9fd6f968 SuccessfulCreate Created pod: simple-prod-collector-7d9fd6f968-vxb5m replicaset-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-7d9fd6f968 to 1 deployment-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp Binding Scheduled Successfully assigned kuttl-test-helping-kodiak/simple-prod-query-548dfc786f-nm7sp to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp AddedInterface Add eth0 [10.131.0.67/23] from ovn-kubernetes
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal ReplicaSet.apps simple-prod-query-548dfc786f SuccessfulCreate Created pod: simple-prod-query-548dfc786f-nm7sp replicaset-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:52 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-548dfc786f to 1 deployment-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:59 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:59 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:59 +0000 UTC Normal Pod simple-prod-query-548dfc786f-nm7sp.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:59 +0000 UTC Normal ReplicaSet.apps simple-prod-query-548dfc786f SuccessfulDelete Deleted pod: simple-prod-query-548dfc786f-nm7sp replicaset-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:59 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r Binding Scheduled Successfully assigned kuttl-test-helping-kodiak/simple-prod-query-6c84c6c494-pbd5r to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:59 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6c84c6c494 SuccessfulCreate Created pod: simple-prod-query-6c84c6c494-pbd5r replicaset-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:59 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-548dfc786f to 0 from 1 deployment-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:04:59 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6c84c6c494 to 1 deployment-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r AddedInterface Add eth0 [10.131.0.68/23] from ovn-kubernetes
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:00 +0000 UTC Normal Pod simple-prod-query-6c84c6c494-pbd5r.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:04 +0000 UTC Normal Pod check-span-b2lrf Binding Scheduled Successfully assigned kuttl-test-helping-kodiak/check-span-b2lrf to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:04 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-b2lrf job-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:04 +0000 UTC Normal Pod report-span-k2nl6 Binding Scheduled Successfully assigned kuttl-test-helping-kodiak/report-span-k2nl6 to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:04 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-k2nl6 job-controller
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:05 +0000 UTC Normal Pod check-span-b2lrf AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:05 +0000 UTC Normal Pod check-span-b2lrf.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:05 +0000 UTC Normal Pod check-span-b2lrf.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:05 +0000 UTC Normal Pod check-span-b2lrf.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:05 +0000 UTC Normal Pod report-span-k2nl6 AddedInterface Add eth0 [10.128.2.49/23] from ovn-kubernetes
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:05 +0000 UTC Normal Pod report-span-k2nl6.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:05 +0000 UTC Normal Pod report-span-k2nl6.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:05 +0000 UTC Normal Pod report-span-k2nl6.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:05:16 | examples-simple-prod | 2023-12-18 07:05:16 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:05:16 | examples-simple-prod | Deleting namespace: kuttl-test-helping-kodiak
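The HPA warnings repeated in both simple-prod tests above are expected noise in these runs: the operator creates a HorizontalPodAutoscaler for the collector, and for the first seconds after the pod starts the resource metrics API has no cpu/memory samples for it, so the autoscaler emits FailedGetResourceMetric and FailedComputeMetricsReplicas until metrics-server catches up. If the warnings persisted, the usual first checks would be (hypothetical invocations; these namespaces are deleted at the end of each test):

kubectl describe hpa simple-prod-collector -n "$NAMESPACE"   # shows the failing metric conditions
kubectl top pods -n "$NAMESPACE"                             # confirms whether metrics-server returns samples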
=== CONT kuttl/harness/examples-service-types
logger.go:42: 07:05:29 | examples-service-types | Creating namespace: kuttl-test-absolute-wahoo
logger.go:42: 07:05:29 | examples-service-types/0-install | starting test step 0-install
logger.go:42: 07:05:29 | examples-service-types/0-install | Jaeger:kuttl-test-absolute-wahoo/service-types created
logger.go:42: 07:05:35 | examples-service-types/0-install | test step completed 0-install
logger.go:42: 07:05:35 | examples-service-types/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:05:35 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null]
logger.go:42: 07:05:37 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:05:43 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:05:44 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:05:44 | examples-service-types/1-smoke-test | job.batch/report-span created
logger.go:42: 07:05:44 | examples-service-types/1-smoke-test | job.batch/check-span created
logger.go:42: 07:05:55 | examples-service-types/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:05:55 | examples-service-types/2- | starting test step 2-
logger.go:42: 07:05:55 | examples-service-types/2- | test step completed 2-
logger.go:42: 07:05:55 | examples-service-types | examples-service-types events from ns kuttl-test-absolute-wahoo:
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:33 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt Binding Scheduled Successfully assigned kuttl-test-absolute-wahoo/service-types-7dd764996d-8ptwt to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:33 +0000 UTC Normal ReplicaSet.apps service-types-7dd764996d SuccessfulCreate Created pod: service-types-7dd764996d-8ptwt replicaset-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:33 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:33 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-7dd764996d to 1 deployment-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:34 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:34 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:34 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:34 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:34 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:34 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:34 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:37 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:37 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:40 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:40 +0000 UTC Normal Pod service-types-7dd764996d-8ptwt.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:40 +0000 UTC Normal ReplicaSet.apps service-types-7dd764996d SuccessfulDelete Deleted pod: service-types-7dd764996d-8ptwt replicaset-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:40 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:40 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-7dd764996d to 0 from 1 deployment-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:41 +0000 UTC Normal Pod service-types-6f6f8779bc-sv274 Binding Scheduled Successfully assigned kuttl-test-absolute-wahoo/service-types-6f6f8779bc-sv274 to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:41 +0000 UTC Normal ReplicaSet.apps service-types-6f6f8779bc SuccessfulCreate Created pod: service-types-6f6f8779bc-sv274 replicaset-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:41 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-6f6f8779bc to 1 deployment-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:42 +0000 UTC Normal Pod service-types-6f6f8779bc-sv274 AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:42 +0000 UTC Normal Pod service-types-6f6f8779bc-sv274.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:42 +0000 UTC Normal Pod service-types-6f6f8779bc-sv274.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:42 +0000 UTC Normal Pod service-types-6f6f8779bc-sv274.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:42 +0000 UTC Normal Pod service-types-6f6f8779bc-sv274.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:42 +0000 UTC Normal Pod service-types-6f6f8779bc-sv274.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:42 +0000 UTC Normal Pod service-types-6f6f8779bc-sv274.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Pod check-span-jz286 Binding Scheduled Successfully assigned kuttl-test-absolute-wahoo/check-span-jz286 to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Pod check-span-jz286 AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Pod check-span-jz286.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Pod check-span-jz286.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-jz286 job-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Pod report-span-4sm76 Binding Scheduled Successfully assigned kuttl-test-absolute-wahoo/report-span-4sm76 to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Pod report-span-4sm76 AddedInterface Add eth0 [10.131.0.69/23] from ovn-kubernetes
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Pod report-span-4sm76.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Pod report-span-4sm76.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Pod report-span-4sm76.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:44 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-4sm76 job-controller
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:45 +0000 UTC Normal Pod check-span-jz286.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:05:55 | examples-service-types | 2023-12-18 07:05:55 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:05:55 | examples-service-types | Deleting namespace: kuttl-test-absolute-wahoo
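The EnsuringLoadBalancer/EnsuredLoadBalancer pairs above are the point of the examples-service-types test: the Jaeger CR asks for LoadBalancer services, and the cloud service-controller provisions one for the collector and one for the query service. A quick way to verify the provisioned type and address would be (illustrative; the namespace is deleted at the end of the test):

kubectl get svc service-types-collector service-types-query -n "$NAMESPACE" \
  -o custom-columns='NAME:.metadata.name,TYPE:.spec.type,EXTERNAL:.status.loadBalancer.ingress[*].hostname'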
=== CONT kuttl/harness/examples-openshift-with-htpasswd
logger.go:42: 07:06:23 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:06:23 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:06:23 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-wise-grubworm
logger.go:42: 07:06:23 | examples-openshift-with-htpasswd/0-install | starting test step 0-install
logger.go:42: 07:06:23 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-wise-grubworm/htpasswd created
logger.go:42: 07:06:23 | examples-openshift-with-htpasswd/0-install | test step completed 0-install
logger.go:42: 07:06:23 | examples-openshift-with-htpasswd/1-install | starting test step 1-install
logger.go:42: 07:06:23 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-wise-grubworm/with-htpasswd created
logger.go:42: 07:06:29 | examples-openshift-with-htpasswd/1-install | test step completed 1-install
logger.go:42: 07:06:29 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured
logger.go:42: 07:06:29 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:06:29 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated
logger.go:42: 07:06:29 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0
logger.go:42: 07:06:30 | examples-openshift-with-htpasswd/2-check-unsecured | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template:
logger.go:42: 07:06:30 | examples-openshift-with-htpasswd/2-check-unsecured | template was:
logger.go:42: 07:06:30 | examples-openshift-with-htpasswd/2-check-unsecured | {.items[0].status.ingress[0].host}
logger.go:42: 07:06:30 | examples-openshift-with-htpasswd/2-check-unsecured | object given to jsonpath engine was:
logger.go:42: 07:06:30 | examples-openshift-with-htpasswd/2-check-unsecured | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}}
logger.go:42: 07:06:30 | examples-openshift-with-htpasswd/2-check-unsecured |
logger.go:42: 07:06:30 | examples-openshift-with-htpasswd/2-check-unsecured |
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
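The jsonpath error above is the expected first iteration of ensure-ingress-host.sh: the Route's .status.ingress is empty until the OpenShift router admits it, so indexing [0] fails, and the retry ten seconds later succeeds. The script itself is not reproduced in this log, but its retry amounts to something like this sketch (the 10-second sleep and 30-try bound are inferred from the log, not confirmed):

# Poll until the router populates the Route's ingress host.
for try in $(seq 0 29); do
  echo "Try number $try"
  host=$(kubectl get routes -n "$NAMESPACE" -o jsonpath='{.items[0].status.ingress[0].host}' 2>/dev/null)
  [ -n "$host" ] && break
  sleep 10
done
echo "Hostname is $host"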
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd]
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-wise-grubworm:
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:27 +0000 UTC Normal Pod with-htpasswd-c8bd5df7f-hz8zg Binding Scheduled Successfully assigned kuttl-test-wise-grubworm/with-htpasswd-c8bd5df7f-hz8zg to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:27 +0000 UTC Normal ReplicaSet.apps with-htpasswd-c8bd5df7f SuccessfulCreate Created pod: with-htpasswd-c8bd5df7f-hz8zg replicaset-controller
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:27 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-c8bd5df7f to 1 deployment-controller
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:28 +0000 UTC Normal Pod with-htpasswd-c8bd5df7f-hz8zg AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:28 +0000 UTC Normal Pod with-htpasswd-c8bd5df7f-hz8zg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:28 +0000 UTC Normal Pod with-htpasswd-c8bd5df7f-hz8zg.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:28 +0000 UTC Normal Pod with-htpasswd-c8bd5df7f-hz8zg.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:28 +0000 UTC Normal Pod with-htpasswd-c8bd5df7f-hz8zg.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:28 +0000 UTC Normal Pod with-htpasswd-c8bd5df7f-hz8zg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | 2023-12-18 07:06:28 +0000 UTC Normal Pod with-htpasswd-c8bd5df7f-hz8zg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:06:40 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-wise-grubworm
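The three check steps above assert oauth-proxy behaviour purely through HTTP status codes: anonymous and wrong-credential requests to /search must return 403, while the htpasswd user awesomeuser must get 200. Stripped of the harness, the same assertions look like this (a sketch; -k plays the role of the "insecure mode" fallback seen in the log):

HOST=with-htpasswd-kuttl-test-wise-grubworm.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
curl -ks -o /dev/null -w '%{http_code}\n' "https://$HOST/search"                                # expect 403
curl -ks -o /dev/null -w '%{http_code}\n' -u wronguser:wrongpassword "https://$HOST/search"     # expect 403
curl -ks -o /dev/null -w '%{http_code}\n' -u awesomeuser:awesomepassword "https://$HOST/search" # expect 200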
=== CONT kuttl/harness/examples-collector-with-priority-class
logger.go:42: 07:06:47 | examples-collector-with-priority-class | Creating namespace: kuttl-test-usable-toucan
logger.go:42: 07:06:47 | examples-collector-with-priority-class/0-install | starting test step 0-install
logger.go:42: 07:06:47 | examples-collector-with-priority-class/0-install | PriorityClass:/collector-high-priority created
logger.go:42: 07:06:47 | examples-collector-with-priority-class/0-install | Jaeger:kuttl-test-usable-toucan/collector-with-high-priority created
logger.go:42: 07:06:53 | examples-collector-with-priority-class/0-install | test step completed 0-install
logger.go:42: 07:06:53 | examples-collector-with-priority-class/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:06:53 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE collector-with-high-priority /dev/null]
logger.go:42: 07:06:54 | examples-collector-with-priority-class/1-smoke-test | Warning: resource jaegers/collector-with-high-priority is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:07:00 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:07:01 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:07:01 | examples-collector-with-priority-class/1-smoke-test | job.batch/report-span created
logger.go:42: 07:07:01 | examples-collector-with-priority-class/1-smoke-test | job.batch/check-span created
logger.go:42: 07:07:12 | examples-collector-with-priority-class/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:07:12 | examples-collector-with-priority-class | examples-collector-with-priority-class events from ns kuttl-test-usable-toucan:
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:50 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-bc58d8d89 SuccessfulCreate Created pod: collector-with-high-priority-bc58d8d89-js5r5 replicaset-controller
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:50 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-bc58d8d89 to 1 deployment-controller
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:51 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5 Binding Scheduled Successfully assigned kuttl-test-usable-toucan/collector-with-high-priority-bc58d8d89-js5r5 to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:51 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5 AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:51 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:51 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:51 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:51 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:51 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:51 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:57 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:57 +0000 UTC Normal Pod collector-with-high-priority-bc58d8d89-js5r5.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:57 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-bc58d8d89 SuccessfulDelete Deleted pod: collector-with-high-priority-bc58d8d89-js5r5 replicaset-controller
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:57 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled down replica set collector-with-high-priority-bc58d8d89 to 0 from 1 deployment-controller
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:58 +0000 UTC Normal Pod collector-with-high-priority-77889fc7bc-rb992 Binding Scheduled Successfully assigned kuttl-test-usable-toucan/collector-with-high-priority-77889fc7bc-rb992 to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:58 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-77889fc7bc SuccessfulCreate Created pod: collector-with-high-priority-77889fc7bc-rb992 replicaset-controller
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:58 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-77889fc7bc to 1 deployment-controller
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:59 +0000 UTC Normal Pod collector-with-high-priority-77889fc7bc-rb992 AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:59 +0000 UTC Normal Pod collector-with-high-priority-77889fc7bc-rb992.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:59 +0000 UTC Normal Pod collector-with-high-priority-77889fc7bc-rb992.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:59 +0000 UTC Normal Pod collector-with-high-priority-77889fc7bc-rb992.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:59 +0000 UTC Normal Pod collector-with-high-priority-77889fc7bc-rb992.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:59 +0000 UTC Normal Pod collector-with-high-priority-77889fc7bc-rb992.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:06:59 +0000 UTC Normal Pod collector-with-high-priority-77889fc7bc-rb992.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:01 +0000 UTC Normal Pod check-span-hnhvs Binding Scheduled Successfully assigned kuttl-test-usable-toucan/check-span-hnhvs to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:01 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-hnhvs job-controller
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:01 +0000 UTC Normal Pod report-span-g285z Binding Scheduled Successfully assigned kuttl-test-usable-toucan/report-span-g285z to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:01 +0000 UTC Normal Pod report-span-g285z AddedInterface Add eth0 [10.131.0.70/23] from ovn-kubernetes
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:01 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-g285z job-controller
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:02 +0000 UTC Normal Pod check-span-hnhvs AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:02 +0000 UTC Normal Pod check-span-hnhvs.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:02 +0000 UTC Normal Pod check-span-hnhvs.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:02 +0000 UTC Normal Pod check-span-hnhvs.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:02 +0000 UTC Normal Pod report-span-g285z.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:02 +0000 UTC Normal Pod report-span-g285z.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:02 +0000 UTC Normal Pod report-span-g285z.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:07:12 | examples-collector-with-priority-class | 2023-12-18 07:07:12 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:07:12 | examples-collector-with-priority-class | Deleting namespace: kuttl-test-usable-toucan
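The 0-install step of the priority-class test creates exactly two objects: a cluster-scoped PriorityClass named collector-high-priority and a Jaeger CR whose collector references it by name. The actual manifests live in the operator's tests/e2e directory and are not reproduced in this output; a rough equivalent of the PriorityClass half would be (the value and description are placeholders, not the test's real settings):

kubectl apply -f - <<'EOF'
apiVersion: scheduling.k8s.io/v1
kind: PriorityClass
metadata:
  name: collector-high-priority
value: 1000000                # placeholder; the test's real value is not shown in this log
description: Priority class used by the Jaeger collector e2e test
EOF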
logger.go:42: 07:07:46 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:07:47 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:07:47 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created logger.go:42: 07:07:47 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created logger.go:42: 07:07:52 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-poetic-bengal: logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:26 +0000 UTC Normal Pod myapp-679f79d5f8-2j29q Binding Scheduled Successfully assigned kuttl-test-poetic-bengal/myapp-679f79d5f8-2j29q to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:26 +0000 UTC Normal Pod myapp-679f79d5f8-2j29q AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:26 +0000 UTC Normal Pod myapp-679f79d5f8-2j29q.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:26 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulCreate Created pod: myapp-679f79d5f8-2j29q replicaset-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:26 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-679f79d5f8 to 1 deployment-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:29 +0000 UTC Normal Pod myapp-58b4bdc8c9-8g9xc Binding Scheduled Successfully assigned kuttl-test-poetic-bengal/myapp-58b4bdc8c9-8g9xc to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:29 +0000 UTC Warning Pod myapp-58b4bdc8c9-8g9xc FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:29 +0000 UTC Warning Pod myapp-58b4bdc8c9-8g9xc FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:29 +0000 UTC Normal ReplicaSet.apps myapp-58b4bdc8c9 SuccessfulCreate Created pod: myapp-58b4bdc8c9-8g9xc replicaset-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:29 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-58b4bdc8c9 to 1 
deployment-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:31 +0000 UTC Normal Pod myapp-679f79d5f8-2j29q.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 4.9s (4.9s including waiting) kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:31 +0000 UTC Normal Pod myapp-679f79d5f8-2j29q.spec.containers{myapp} Created Created container myapp kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:31 +0000 UTC Normal Pod myapp-679f79d5f8-2j29q.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:34 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2 Binding Scheduled Successfully assigned kuttl-test-poetic-bengal/simplest-6bcdc5499b-846h2 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:34 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2 AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:34 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:34 +0000 UTC Normal ReplicaSet.apps simplest-6bcdc5499b SuccessfulCreate Created pod: simplest-6bcdc5499b-846h2 replicaset-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:34 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-6bcdc5499b to 1 deployment-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod myapp-58b4bdc8c9-8g9xc AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod myapp-58b4bdc8c9-8g9xc.spec.containers{myapp} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod myapp-58b4bdc8c9-8g9xc.spec.containers{myapp} Created Created container myapp kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod myapp-58b4bdc8c9-8g9xc.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod myapp-58b4bdc8c9-8g9xc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod myapp-58b4bdc8c9-8g9xc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod 
myapp-58b4bdc8c9-8g9xc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Warning Pod myapp-679f79d5f8-2j29q.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.129.2.42:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" in 2.715s (2.715s including waiting) kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:37 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:38 +0000 UTC Normal Pod myapp-679f79d5f8-2j29q.spec.containers{myapp} Killing Stopping container myapp kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:38 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulDelete Deleted pod: myapp-679f79d5f8-2j29q replicaset-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:38 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-679f79d5f8 to 0 from 1 deployment-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:41 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:41 +0000 UTC Normal Pod simplest-6bcdc5499b-846h2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:41 +0000 UTC Normal ReplicaSet.apps simplest-6bcdc5499b SuccessfulDelete Deleted pod: simplest-6bcdc5499b-846h2 replicaset-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:41 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-6bcdc5499b to 0 from 1 deployment-controller logger.go:42: 07:07:52 | 
examples-business-application-injected-sidecar | 2023-12-18 07:07:42 +0000 UTC Normal Pod simplest-77cf5b6b69-qtddp Binding Scheduled Successfully assigned kuttl-test-poetic-bengal/simplest-77cf5b6b69-qtddp to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:42 +0000 UTC Normal Pod simplest-77cf5b6b69-qtddp AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:42 +0000 UTC Normal Pod simplest-77cf5b6b69-qtddp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:42 +0000 UTC Normal ReplicaSet.apps simplest-77cf5b6b69 SuccessfulCreate Created pod: simplest-77cf5b6b69-qtddp replicaset-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:42 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-77cf5b6b69 to 1 deployment-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:43 +0000 UTC Normal Pod simplest-77cf5b6b69-qtddp.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:43 +0000 UTC Normal Pod simplest-77cf5b6b69-qtddp.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:43 +0000 UTC Normal Pod simplest-77cf5b6b69-qtddp.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:43 +0000 UTC Normal Pod simplest-77cf5b6b69-qtddp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:43 +0000 UTC Normal Pod simplest-77cf5b6b69-qtddp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:47 +0000 UTC Normal Pod check-span-jjjp2 Binding Scheduled Successfully assigned kuttl-test-poetic-bengal/check-span-jjjp2 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:47 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-jjjp2 job-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:47 +0000 UTC Normal Pod report-span-rvrdd Binding Scheduled Successfully assigned kuttl-test-poetic-bengal/report-span-rvrdd to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:47 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-rvrdd job-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:48 +0000 UTC Normal Pod check-span-jjjp2 AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes 
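The event stream above shows how sidecar injection plays out in this test: the business application (myapp) is deployed first without tracing, the Jaeger instance 'simplest' is created, and the operator then rolls the Deployment to a new ReplicaSet (myapp-58b4bdc8c9) whose pod carries an extra jaeger-agent container. The transient FailedMount warnings appear because the new pod references the simplest-service-ca and simplest-trusted-ca ConfigMaps a moment before the operator finishes creating them. Injection is opt-in via an annotation; a minimal sketch of the kind of Deployment manifest that triggers it (illustrative only, the test's actual manifest is not reproduced in this log):

apiVersion: apps/v1
kind: Deployment
metadata:
  name: myapp
  annotations:
    sidecar.jaegertracing.io/inject: "true"   # jaeger-operator watches for this annotation
spec:
  replicas: 1
  selector:
    matchLabels:
      app: myapp
  template:
    metadata:
      labels:
        app: myapp
    spec:
      containers:
      - name: myapp
        image: jaegertracing/vertx-create-span:operator-e2e-tests   # image pulled in the events above
        ports:
        - containerPort: 8080   # target of the liveness probes seen timing out intermittently above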
logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:48 +0000 UTC Normal Pod check-span-jjjp2.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:48 +0000 UTC Warning Pod myapp-58b4bdc8c9-8g9xc.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.129.2.43:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:48 +0000 UTC Normal Pod report-span-rvrdd AddedInterface Add eth0 [10.131.0.71/23] from ovn-kubernetes logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:48 +0000 UTC Normal Pod report-span-rvrdd.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:48 +0000 UTC Normal Pod report-span-rvrdd.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:48 +0000 UTC Normal Pod report-span-rvrdd.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:49 +0000 UTC Normal Pod check-span-jjjp2.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" in 1.617s (1.617s including waiting) kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:49 +0000 UTC Normal Pod check-span-jjjp2.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:49 +0000 UTC Normal Pod check-span-jjjp2.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | 2023-12-18 07:07:51 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:07:52 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-poetic-bengal === CONT kuttl/harness/examples-all-in-one-with-options logger.go:42: 07:08:06 | examples-all-in-one-with-options | Creating namespace: kuttl-test-tight-griffon logger.go:42: 07:08:06 | examples-all-in-one-with-options/0-install | starting test step 0-install logger.go:42: 07:08:06 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-tight-griffon/my-jaeger created logger.go:42: 07:08:12 | examples-all-in-one-with-options/0-install | test step completed 0-install logger.go:42: 07:08:12 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:08:12 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:08:13 | 
examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:08:19 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:08:20 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:08:20 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created logger.go:42: 07:08:20 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created logger.go:42: 07:08:31 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:08:31 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-tight-griffon: logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:09 +0000 UTC Normal Pod my-jaeger-577d8f6b69-tl5cg Binding Scheduled Successfully assigned kuttl-test-tight-griffon/my-jaeger-577d8f6b69-tl5cg to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:09 +0000 UTC Normal ReplicaSet.apps my-jaeger-577d8f6b69 SuccessfulCreate Created pod: my-jaeger-577d8f6b69-tl5cg replicaset-controller logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:09 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-577d8f6b69 to 1 deployment-controller logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:10 +0000 UTC Normal Pod my-jaeger-577d8f6b69-tl5cg AddedInterface Add eth0 [10.129.2.45/23] from ovn-kubernetes logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:10 +0000 UTC Normal Pod my-jaeger-577d8f6b69-tl5cg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:10 +0000 UTC Normal Pod my-jaeger-577d8f6b69-tl5cg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:10 +0000 UTC Normal Pod my-jaeger-577d8f6b69-tl5cg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:10 +0000 UTC Normal Pod my-jaeger-577d8f6b69-tl5cg.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:10 +0000 UTC Normal Pod 
my-jaeger-577d8f6b69-tl5cg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:10 +0000 UTC Normal Pod my-jaeger-577d8f6b69-tl5cg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:16 +0000 UTC Normal Pod my-jaeger-577d8f6b69-tl5cg.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:16 +0000 UTC Normal Pod my-jaeger-577d8f6b69-tl5cg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:16 +0000 UTC Normal ReplicaSet.apps my-jaeger-577d8f6b69 SuccessfulDelete Deleted pod: my-jaeger-577d8f6b69-tl5cg replicaset-controller logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:16 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-577d8f6b69 to 0 from 1 deployment-controller logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal Pod my-jaeger-6cc5c96f54-ckwr8 Binding Scheduled Successfully assigned kuttl-test-tight-griffon/my-jaeger-6cc5c96f54-ckwr8 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal Pod my-jaeger-6cc5c96f54-ckwr8 AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal Pod my-jaeger-6cc5c96f54-ckwr8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal Pod my-jaeger-6cc5c96f54-ckwr8.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal Pod my-jaeger-6cc5c96f54-ckwr8.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal Pod my-jaeger-6cc5c96f54-ckwr8.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal Pod my-jaeger-6cc5c96f54-ckwr8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal Pod my-jaeger-6cc5c96f54-ckwr8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal ReplicaSet.apps my-jaeger-6cc5c96f54 SuccessfulCreate Created pod: my-jaeger-6cc5c96f54-ckwr8 replicaset-controller logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:17 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6cc5c96f54 to 1 deployment-controller logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 
07:08:20 +0000 UTC Normal Pod check-span-clk2f Binding Scheduled Successfully assigned kuttl-test-tight-griffon/check-span-clk2f to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:20 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-clk2f job-controller logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:20 +0000 UTC Normal Pod report-span-rbpdc Binding Scheduled Successfully assigned kuttl-test-tight-griffon/report-span-rbpdc to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:20 +0000 UTC Normal Pod report-span-rbpdc AddedInterface Add eth0 [10.131.0.72/23] from ovn-kubernetes logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:20 +0000 UTC Normal Pod report-span-rbpdc.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:20 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-rbpdc job-controller logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:21 +0000 UTC Normal Pod check-span-clk2f AddedInterface Add eth0 [10.128.2.55/23] from ovn-kubernetes logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:21 +0000 UTC Normal Pod check-span-clk2f.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:21 +0000 UTC Normal Pod check-span-clk2f.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:21 +0000 UTC Normal Pod check-span-clk2f.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:21 +0000 UTC Normal Pod report-span-rbpdc.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:21 +0000 UTC Normal Pod report-span-rbpdc.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:08:31 | examples-all-in-one-with-options | 2023-12-18 07:08:31 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:08:31 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-tight-griffon === CONT kuttl/harness/examples-agent-with-priority-class logger.go:42: 07:08:44 | examples-agent-with-priority-class | Creating namespace: kuttl-test-mature-honeybee logger.go:42: 07:08:44 | examples-agent-with-priority-class/0-install | starting test step 0-install logger.go:42: 07:08:44 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 07:08:44 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-mature-honeybee/jaeger-agent-daemonset created logger.go:42: 07:08:44 | examples-agent-with-priority-class/0-install | test step completed 0-install logger.go:42: 
07:08:44 | examples-agent-with-priority-class/1-add-policy | starting test step 1-add-policy logger.go:42: 07:08:44 | examples-agent-with-priority-class/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset] logger.go:42: 07:08:44 | examples-agent-with-priority-class/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset" logger.go:42: 07:08:44 | examples-agent-with-priority-class/1-add-policy | running command: [sh -c sleep 5] logger.go:42: 07:08:49 | examples-agent-with-priority-class/1-add-policy | test step completed 1-add-policy logger.go:42: 07:08:49 | examples-agent-with-priority-class/2-install | starting test step 2-install logger.go:42: 07:08:49 | examples-agent-with-priority-class/2-install | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 07:08:56 | examples-agent-with-priority-class/2-install | Error from server (NotFound): deployments.apps "agent-as-daemonset" not found logger.go:42: 07:08:56 | examples-agent-with-priority-class/2-install | command failure, skipping 2 additional commands logger.go:42: 07:08:56 | examples-agent-with-priority-class/2-install | PriorityClass:/high-priority created logger.go:42: 07:08:56 | examples-agent-with-priority-class/2-install | Jaeger:kuttl-test-mature-honeybee/agent-as-daemonset updated case.go:364: failed in step 2-install case.go:366: exit status 1 logger.go:42: 07:08:56 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns kuttl-test-mature-honeybee: logger.go:42: 07:08:56 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-mature-honeybee === CONT kuttl/harness/examples-agent-as-daemonset logger.go:42: 07:09:03 | examples-agent-as-daemonset | Creating namespace: kuttl-test-up-bullfrog logger.go:42: 07:09:03 | examples-agent-as-daemonset/0-install | starting test step 0-install logger.go:42: 07:09:03 | examples-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 07:09:03 | examples-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-up-bullfrog/jaeger-agent-daemonset created logger.go:42: 07:09:03 | examples-agent-as-daemonset/0-install | test step completed 0-install logger.go:42: 07:09:03 | examples-agent-as-daemonset/1-add-policy | starting test step 1-add-policy logger.go:42: 07:09:03 | examples-agent-as-daemonset/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset] logger.go:42: 07:09:03 | examples-agent-as-daemonset/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset" logger.go:42: 07:09:03 | examples-agent-as-daemonset/1-add-policy | running command: [sh -c sleep 5] logger.go:42: 07:09:08 | examples-agent-as-daemonset/1-add-policy | test step completed 1-add-policy logger.go:42: 07:09:08 | examples-agent-as-daemonset/2-install | starting test step 2-install logger.go:42: 07:09:08 | examples-agent-as-daemonset/2-install | Jaeger:kuttl-test-up-bullfrog/agent-as-daemonset created logger.go:42: 07:09:08 | examples-agent-as-daemonset/2-install | test step completed 2-install logger.go:42: 07:09:08 | examples-agent-as-daemonset/3- | starting test step 3- logger.go:42: 07:19:09 | 
examples-agent-as-daemonset/3- | test step failed 3-
case.go:364: failed in step 3-
case.go:366: --- DaemonSet:kuttl-test-up-bullfrog/agent-as-daemonset-agent-daemonset
+++ DaemonSet:kuttl-test-up-bullfrog/agent-as-daemonset-agent-daemonset
@@ -1,8 +1,310 @@
 apiVersion: apps/v1
 kind: DaemonSet
 metadata:
+  annotations:
+    deprecated.daemonset.template.generation: "1"
+  labels:
+    app: jaeger
+    app.kubernetes.io/component: agent
+    app.kubernetes.io/instance: agent-as-daemonset
+    app.kubernetes.io/managed-by: jaeger-operator
+    app.kubernetes.io/name: agent-as-daemonset-agent
+    app.kubernetes.io/part-of: jaeger
+  managedFields:
+  - apiVersion: apps/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:deprecated.daemonset.template.generation: {}
+        f:labels:
+          .: {}
+          f:app: {}
+          f:app.kubernetes.io/component: {}
+          f:app.kubernetes.io/instance: {}
+          f:app.kubernetes.io/managed-by: {}
+          f:app.kubernetes.io/name: {}
+          f:app.kubernetes.io/part-of: {}
+        f:ownerReferences:
+          .: {}
+          k:{"uid":"ee3cf110-4a75-462a-8e1a-1d4eef5c716b"}: {}
+      f:spec:
+        f:revisionHistoryLimit: {}
+        f:selector: {}
+        f:template:
+          f:metadata:
+            f:annotations:
+              .: {}
+              f:linkerd.io/inject: {}
+              f:prometheus.io/port: {}
+              f:prometheus.io/scrape: {}
+              f:sidecar.istio.io/inject: {}
+            f:labels:
+              .: {}
+              f:app: {}
+              f:app.kubernetes.io/component: {}
+              f:app.kubernetes.io/instance: {}
+              f:app.kubernetes.io/managed-by: {}
+              f:app.kubernetes.io/name: {}
+              f:app.kubernetes.io/part-of: {}
+          f:spec:
+            f:containers:
+              k:{"name":"jaeger-agent-daemonset"}:
+                .: {}
+                f:args: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:livenessProbe:
+                  .: {}
+                  f:failureThreshold: {}
+                  f:httpGet:
+                    .: {}
+                    f:path: {}
+                    f:port: {}
+                    f:scheme: {}
+                  f:initialDelaySeconds: {}
+                  f:periodSeconds: {}
+                  f:successThreshold: {}
+                  f:timeoutSeconds: {}
+                f:name: {}
+                f:ports:
+                  .: {}
+                  k:{"containerPort":5775,"protocol":"UDP"}:
+                    .: {}
+                    f:containerPort: {}
+                    f:hostPort: {}
+                    f:name: {}
+                    f:protocol: {}
+                  k:{"containerPort":5778,"protocol":"TCP"}:
+                    .: {}
+                    f:containerPort: {}
+                    f:hostPort: {}
+                    f:name: {}
+                    f:protocol: {}
+                  k:{"containerPort":6831,"protocol":"UDP"}:
+                    .: {}
+                    f:containerPort: {}
+                    f:hostPort: {}
+                    f:name: {}
+                    f:protocol: {}
+                  k:{"containerPort":6832,"protocol":"UDP"}:
+                    .: {}
+                    f:containerPort: {}
+                    f:hostPort: {}
+                    f:name: {}
+                    f:protocol: {}
+                  k:{"containerPort":14271,"protocol":"TCP"}:
+                    .: {}
+                    f:containerPort: {}
+                    f:hostPort: {}
+                    f:name: {}
+                    f:protocol: {}
+                f:readinessProbe:
+                  .: {}
+                  f:failureThreshold: {}
+                  f:httpGet:
+                    .: {}
+                    f:path: {}
+                    f:port: {}
+                    f:scheme: {}
+                  f:initialDelaySeconds: {}
+                  f:periodSeconds: {}
+                  f:successThreshold: {}
+                  f:timeoutSeconds: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+                    f:readOnly: {}
+                  k:{"mountPath":"/etc/pki/ca-trust/source/service-ca"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+                    f:readOnly: {}
+            f:dnsPolicy: {}
+            f:enableServiceLinks: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:serviceAccount: {}
+            f:serviceAccountName: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"agent-as-daemonset-service-ca"}:
+                .: {}
+                f:configMap:
+                  .: {}
+                  f:defaultMode: {}
+                  f:items: {}
+                  f:name: {}
+                f:name: {}
+              k:{"name":"agent-as-daemonset-trusted-ca"}:
+                .: {}
+                f:configMap:
+                  .: {}
+                  f:defaultMode: {}
+                  f:items: {}
+                  f:name: {}
+                f:name: {}
+        f:updateStrategy:
+          f:rollingUpdate:
+            .: {}
+            f:maxSurge: {}
+            f:maxUnavailable: {}
+          f:type: {}
+    manager: jaeger-operator
+    operation: Update
+    time: "2023-12-18T07:09:12Z"
+  - apiVersion: apps/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:currentNumberScheduled: {}
+        f:desiredNumberScheduled: {}
+        f:numberAvailable: {}
+        f:numberReady: {}
+        f:observedGeneration: {}
+        f:updatedNumberScheduled: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-12-18T07:09:15Z"
   name: agent-as-daemonset-agent-daemonset
   namespace: kuttl-test-up-bullfrog
+  ownerReferences:
+  - apiVersion: jaegertracing.io/v1
+    controller: true
+    kind: Jaeger
+    name: agent-as-daemonset
+    uid: ee3cf110-4a75-462a-8e1a-1d4eef5c716b
+spec:
+  revisionHistoryLimit: 10
+  selector:
+    matchLabels:
+      app: jaeger
+      app.kubernetes.io/component: agent
+      app.kubernetes.io/instance: agent-as-daemonset
+      app.kubernetes.io/managed-by: jaeger-operator
+      app.kubernetes.io/name: agent-as-daemonset-agent
+      app.kubernetes.io/part-of: jaeger
+  template:
+    metadata:
+      annotations:
+        linkerd.io/inject: disabled
+        prometheus.io/port: "14271"
+        prometheus.io/scrape: "true"
+        sidecar.istio.io/inject: "false"
+      creationTimestamp: null
+      labels:
+        app: jaeger
+        app.kubernetes.io/component: agent
+        app.kubernetes.io/instance: agent-as-daemonset
+        app.kubernetes.io/managed-by: jaeger-operator
+        app.kubernetes.io/name: agent-as-daemonset-agent
+        app.kubernetes.io/part-of: jaeger
+    spec:
+      containers:
+      - args:
+        - --log-level=debug
+        - --reporter.grpc.host-port=dns:///agent-as-daemonset-collector-headless.kuttl-test-up-bullfrog:14250
+        - --reporter.grpc.tls.ca=/etc/pki/ca-trust/source/service-ca/service-ca.crt
+        - --reporter.grpc.tls.enabled=true
+        - --reporter.grpc.tls.server-name=agent-as-daemonset-collector-headless.kuttl-test-up-bullfrog.svc.cluster.local
+        image: registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79
+        imagePullPolicy: IfNotPresent
+        livenessProbe:
+          failureThreshold: 5
+          httpGet:
+            path: /
+            port: 14271
+            scheme: HTTP
+          initialDelaySeconds: 5
+          periodSeconds: 15
+          successThreshold: 1
+          timeoutSeconds: 1
+        name: jaeger-agent-daemonset
+        ports:
+        - containerPort: 5775
+          hostPort: 5775
+          name: zk-compact-trft
+          protocol: UDP
+        - containerPort: 5778
+          hostPort: 5778
+          name: config-rest
+          protocol: TCP
+        - containerPort: 6831
+          hostPort: 6831
+          name: jg-compact-trft
+          protocol: UDP
+        - containerPort: 6832
+          hostPort: 6832
+          name: jg-binary-trft
+          protocol: UDP
+        - containerPort: 14271
+          hostPort: 14271
+          name: admin-http
+          protocol: TCP
+        readinessProbe:
+          failureThreshold: 3
+          httpGet:
+            path: /
+            port: 14271
+            scheme: HTTP
+          initialDelaySeconds: 1
+          periodSeconds: 10
+          successThreshold: 1
+          timeoutSeconds: 1
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /etc/pki/ca-trust/extracted/pem
+          name: agent-as-daemonset-trusted-ca
+          readOnly: true
+        - mountPath: /etc/pki/ca-trust/source/service-ca
+          name: agent-as-daemonset-service-ca
+          readOnly: true
+      dnsPolicy: ClusterFirst
+      enableServiceLinks: false
+      restartPolicy: Always
+      schedulerName: default-scheduler
+      securityContext: {}
+      serviceAccount: jaeger-agent-daemonset
+      serviceAccountName: jaeger-agent-daemonset
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - configMap:
+          defaultMode: 420
+          items:
+          - key: ca-bundle.crt
+            path: tls-ca-bundle.pem
+          name: agent-as-daemonset-trusted-ca
+        name: agent-as-daemonset-trusted-ca
+      - configMap:
+          defaultMode: 420
+          items:
+          - key: service-ca.crt
+            path: service-ca.crt
+          name: agent-as-daemonset-service-ca
+        name: agent-as-daemonset-service-ca
+  updateStrategy:
+    rollingUpdate:
+      maxSurge: 0
+      maxUnavailable: 1
+    type: RollingUpdate
 status:
-  numberReady: 1
+  currentNumberScheduled: 3
+  desiredNumberScheduled: 3
+  numberAvailable: 3
+  numberMisscheduled: 0
+  numberReady: 3
+  observedGeneration: 1
+  updatedNumberScheduled: 3
case.go:366: resource DaemonSet:kuttl-test-up-bullfrog/agent-as-daemonset-agent-daemonset: .status.numberReady: value mismatch, expected: 1 != actual: 3
logger.go:42: 07:19:09 | examples-agent-as-daemonset | examples-agent-as-daemonset events from ns kuttl-test-up-bullfrog:
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal Pod agent-as-daemonset-74577897bf-zh7gx Binding Scheduled Successfully assigned kuttl-test-up-bullfrog/agent-as-daemonset-74577897bf-zh7gx to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal Pod agent-as-daemonset-74577897bf-zh7gx AddedInterface Add eth0 [10.129.2.47/23] from ovn-kubernetes
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal Pod agent-as-daemonset-74577897bf-zh7gx.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-74577897bf SuccessfulCreate Created pod: agent-as-daemonset-74577897bf-zh7gx replicaset-controller
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-8tz9r Binding Scheduled Successfully assigned kuttl-test-up-bullfrog/agent-as-daemonset-agent-daemonset-8tz9r to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-8tz9r AddedInterface Add eth0 [10.131.0.73/23] from ovn-kubernetes
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-8tz9r.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-ff7jw Binding Scheduled Successfully assigned kuttl-test-up-bullfrog/agent-as-daemonset-agent-daemonset-ff7jw to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xh2lh Binding Scheduled Successfully assigned kuttl-test-up-bullfrog/agent-as-daemonset-agent-daemonset-xh2lh to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-ff7jw daemonset-controller
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset
SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-xh2lh daemonset-controller logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-8tz9r daemonset-controller logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:12 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-74577897bf to 1 deployment-controller logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-74577897bf-zh7gx.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-74577897bf-zh7gx.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-74577897bf-zh7gx.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-74577897bf-zh7gx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-74577897bf-zh7gx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-8tz9r.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-8tz9r.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-ff7jw AddedInterface Add eth0 [10.129.2.48/23] from ovn-kubernetes logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-ff7jw.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-ff7jw.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-ff7jw.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:13 +0000 UTC Warning Pod agent-as-daemonset-agent-daemonset-xh2lh FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-trusted-ca" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 
07:09:13 +0000 UTC Warning Pod agent-as-daemonset-agent-daemonset-xh2lh FailedMount MountVolume.SetUp failed for volume "kube-api-access-q4hq5" : failed to sync configmap cache: timed out waiting for the condition kubelet
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xh2lh AddedInterface Add eth0 [10.128.2.56/23] from ovn-kubernetes
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xh2lh.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xh2lh.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 07:19:09 | examples-agent-as-daemonset | 2023-12-18 07:09:14 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-xh2lh.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 07:19:09 | examples-agent-as-daemonset | Deleting namespace: kuttl-test-up-bullfrog
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1263.17s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.17s)
        --- PASS: kuttl/harness/examples-with-sampling (56.19s)
        --- PASS: kuttl/harness/examples-with-cassandra (96.02s)
        --- PASS: kuttl/harness/examples-with-badger-and-volume (35.43s)
        --- PASS: kuttl/harness/examples-with-badger (39.73s)
        --- PASS: kuttl/harness/examples-simplest (39.81s)
        --- PASS: kuttl/harness/examples-simple-prod-with-volumes (91.97s)
        --- PASS: kuttl/harness/examples-simple-prod (71.56s)
        --- PASS: kuttl/harness/examples-service-types (54.01s)
        --- PASS: kuttl/harness/examples-openshift-with-htpasswd (23.34s)
        --- PASS: kuttl/harness/examples-collector-with-priority-class (38.07s)
        --- PASS: kuttl/harness/examples-business-application-injected-sidecar (40.84s)
        --- PASS: kuttl/harness/examples-all-in-one-with-options (38.09s)
        --- FAIL: kuttl/harness/examples-agent-with-priority-class (19.09s)
        --- FAIL: kuttl/harness/examples-agent-as-daemonset (612.79s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml
time="2023-12-18T07:19:16Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-18T07:19:16Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-18T07:19:16Z" level=debug msg="normalizing test case names"
time="2023-12-18T07:19:16Z" level=debug msg="examples/artifacts -> examples_artifacts"
time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling"
time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra"
time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-with-badger-and-volume -> examples_examples_with_badger_and_volume"
time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger"
time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest"
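Before the junitcli renaming output continues, the two failures above deserve a note. The long diff printed for examples-agent-as-daemonset/3- is kuttl comparing its expected DaemonSet state against what the cluster reported: the expectation pinned status.numberReady to 1, but this OpenShift cluster has three schedulable workers (the events show agent pods on ip-10-0-0-85, ip-10-0-98-211 and ip-10-0-99-179), so the DaemonSet legitimately reported numberReady: 3. The failing assert presumably looks like the following (reconstructed from the diff above, not copied from the test sources):

apiVersion: apps/v1
kind: DaemonSet
metadata:
  name: agent-as-daemonset-agent-daemonset
status:
  numberReady: 1   # holds on a single-worker kind cluster, not on a 3-worker one

A node-count-independent check would drop the hard-coded value, for example by using a kuttl TestAssert command step that compares numberReady against desiredNumberScheduled. examples-agent-with-priority-class failed earlier for an unrelated reason: its 2-install step ran get-token.sh before the agent-as-daemonset Jaeger instance had been created, so kubectl returned NotFound for the deployment and kuttl skipped the remaining commands of that step.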
time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes" time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod" time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types" time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd" time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-collector-with-priority-class -> examples_examples_collector_with_priority_class" time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar" time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options" time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class" time="2023-12-18T07:19:16Z" level=debug msg="examples/examples-agent-as-daemonset -> examples_examples_agent_as_daemonset" +---------------------------------------------------------+--------+ | NAME | RESULT | +---------------------------------------------------------+--------+ | examples_artifacts | passed | | examples_examples_with_sampling | passed | | examples_examples_with_cassandra | passed | | examples_examples_with_badger_and_volume | passed | | examples_examples_with_badger | passed | | examples_examples_simplest | passed | | examples_examples_simple_prod_with_volumes | passed | | examples_examples_simple_prod | passed | | examples_examples_service_types | passed | | examples_examples_openshift_with_htpasswd | passed | | examples_examples_collector_with_priority_class | passed | | examples_examples_business_application_injected_sidecar | passed | | examples_examples_all_in_one_with_options | passed | | examples_examples_agent_with_priority_class | failed | | examples_examples_agent_as_daemonset | failed | +---------------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true + '[' 3 -ne 3 ']' + test_suite_name=generate + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. 
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/generate.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-generate make[2]: Entering directory '/tmp/jaeger-tests' test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.32.0/operator-sdk_`go env GOOS`_`go env GOARCH` ./hack/install/install-golangci-lint.sh Installing golangci-lint golangci-lint 1.55.2 is installed already ./hack/install/install-goimports.sh Installing goimports Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12 >>>> Formatting code... ./.ci/format.sh >>>> Building... ./hack/install/install-dependencies.sh Installing go dependencies Try 0... go mod download GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.52.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2023-12-18T07:19:17Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.52.0"" -o "bin/jaeger-operator" main.go JAEGER_VERSION="1.52.0" ./tests/e2e/generate/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 41m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 41m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
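As with the other suites, the render step copies each test directory into _build and generates the kuttl-test.yaml harness configuration from templates/kuttl-test.yaml.template (the 'Rendering kuttl-test.yaml' message below). A plausible shape for the rendered file, inferred from the harness output later in this log (600-second step timeout, tests discovered under the current directory, reports written to artifacts/) rather than from the template itself:

apiVersion: kuttl.dev/v1beta1
kind: TestSuite
startKIND: false             # USE_KIND_CLUSTER=false: run against the existing OpenShift cluster
timeout: 600                 # matches 'timeout of 600 seconds for each step' printed by the harness
testDirs:
- .
artifactsDir: ./artifacts    # kuttl-report.xml lands here for junitcli to post-process

The --report xml flag passed to kubectl-kuttl below is what produces artifacts/kuttl-report.xml, which junitcli then normalizes and writes to /logs/artifacts/generate.xml.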
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/generate ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 2 -ne 2 ']' + test_name=generate + message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/generate/_build + '[' _build '!=' _build ']' + rm -rf generate + warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m' WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running generate E2E tests' Running generate E2E tests + cd tests/e2e/generate/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3803946269 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 1 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === CONT kuttl/harness/artifacts logger.go:42: 07:19:34 | artifacts | Creating namespace: kuttl-test-engaging-turtle logger.go:42: 07:19:35 | artifacts | artifacts events from ns kuttl-test-engaging-turtle: logger.go:42: 07:19:35 | artifacts | Deleting namespace: kuttl-test-engaging-turtle === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (6.19s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.14s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml time="2023-12-18T07:19:41Z" level=debug msg="Setting a new name for the test suites" time="2023-12-18T07:19:41Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-12-18T07:19:41Z" level=debug msg="normalizing test case names" time="2023-12-18T07:19:41Z" level=debug msg="generate/artifacts -> generate_artifacts" +--------------------+--------+ | NAME | RESULT | +--------------------+--------+ | generate_artifacts | passed | +--------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true + '[' 3 -ne 3 ']' + test_suite_name=miscellaneous + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/miscellaneous.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-miscellaneous make[2]: Entering directory '/tmp/jaeger-tests' SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 41m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 41m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/miscellaneous/render.sh ++ export SUITE_DIR=./tests/e2e/miscellaneous ++ SUITE_DIR=./tests/e2e/miscellaneous ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test cassandra-spark 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=cassandra-spark + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf cassandra-spark + warning 'cassandra-spark: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: cassandra-spark: Test not supported in OpenShift\e[0m' WAR: cassandra-spark: Test not supported in OpenShift + start_test collector-autoscale + '[' 1 -ne 1 ']' + test_name=collector-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m' Rendering files for test collector-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p collector-autoscale + cd collector-autoscale + jaeger_name=simple-prod + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + ELASTICSEARCH_NODECOUNT=1 + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml + version_lt 1.27 1.23 ++ echo 1.27 1.23 ++ tr ' ' '\n' ++ sort -rV ++ head -n 1 + test 1.27 '!=' 1.27 + rm ./03-assert.yaml + generate_otlp_e2e_tests http + test_protocol=http + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m' Rendering files for test 
collector-otlp-allinone-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale + '[' collector-autoscale '!=' _build ']' + cd .. + mkdir -p collector-otlp-allinone-http + cd collector-otlp-allinone-http + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger http true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m' Rendering files for test collector-otlp-production-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http + '[' collector-otlp-allinone-http '!=' _build ']' + cd .. 
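Note: the scheme and port juggling that render_otlp_smoke_test performs above reduces to a small selection table. A minimal sketch reconstructed from the trace (the real helper lives in the jaeger-tests shell utilities; the unsecured branch is not exercised in this OpenShift run, and :16686, the standard Jaeger query port, is an assumption here):

    # Sketch, reconstructed from the trace above, of how the smoke-test
    # renderer picks its endpoints; variable names mirror the trace.
    reporting_protocol=$1   # http or grpc
    is_secured=$2
    if [ "$is_secured" = true ]; then
      protocol=https://; query_port=:443    # behind the OpenShift OAuth proxy
    else
      protocol=http://; query_port=:16686   # assumption: default Jaeger query port
    fi
    if [ "$reporting_protocol" = grpc ]; then
      reporting_port=:4317                  # OTLP/gRPC
    else
      reporting_port=:4318                  # OTLP/HTTP
    fi
    export JAEGER_QUERY_ENDPOINT="${protocol}${JAEGER_NAME}-query${query_port}"
    export OTEL_EXPORTER_OTLP_ENDPOINT="http://${JAEGER_NAME}-collector-headless${reporting_port}"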
+ mkdir -p collector-otlp-production-http + cd collector-otlp-production-http + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger http true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + generate_otlp_e2e_tests grpc + test_protocol=grpc + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m' Rendering files for test collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http + '[' collector-otlp-production-http '!=' _build ']' + cd .. 
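Every manifest in these suites is produced the same way: gomplate substitutes exported environment variables into a template. A self-contained illustration of the mechanism (the template body below is hypothetical; the real templates live under tests/templates/):

    # Hypothetical one-field template; real templates live in tests/templates/.
    cat > /tmp/demo.yaml.template <<'EOF'
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: {{ .Env.JAEGER_NAME }}
    EOF
    # Same invocation shape as the render calls in the trace.
    JAEGER_NAME=my-jaeger gomplate -f /tmp/demo.yaml.template -o /tmp/demo.yaml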
+ mkdir -p collector-otlp-allinone-grpc + cd collector-otlp-allinone-grpc + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m' Rendering files for test collector-otlp-production-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc + '[' collector-otlp-allinone-grpc '!=' _build ']' + cd .. 
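The version_le / version_lt probes sprinkled through the trace are the classic sort -V idiom: sort the two versions version-aware and check which one lands first. A sketch matching the pipelines visible above (the actual definitions sit in the suite's shell utilities):

    # version_le A B -> true when A <= B; version_lt A B -> true when A < B.
    # Reconstructed from the sort -V / sort -rV pipelines in the trace.
    version_le() { test "$(printf '%s\n%s\n' "$1" "$2" | sort -V  | head -n 1)" = "$1"; }
    version_lt() { test "$(printf '%s\n%s\n' "$1" "$2" | sort -rV | head -n 1)" != "$1"; }
    version_le 0.25.0 3.6.0 && echo "0.25.0 <= 3.6.0"
    version_lt 1.27 1.23   || echo "1.27 >= 1.23"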
+ mkdir -p collector-otlp-production-grpc + cd collector-otlp-production-grpc + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + '[' true = true ']' + skip_test istio 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=istio + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc + '[' collector-otlp-production-grpc '!=' _build ']' + cd .. 
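skip_test, used repeatedly in this run for OpenShift-incompatible cases, simply removes the rendered test directory before kuttl scans _build and prints a warning. A plausible reconstruction from the trace (the real function is part of the render utilities and may differ in detail):

    # Reconstruction of skip_test as it behaves in the trace above.
    skip_test() {
      [ $# -ne 2 ] && return 1
      test_name=$1; message=$2
      # Return to the top of the _build tree before pruning.
      [ "$(basename "$(pwd)")" != _build ] && cd ..
      rm -rf "$test_name"
      echo -e "\e[1;33mWAR: ${test_name}: ${message}\e[0m"
    }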
+ rm -rf istio + warning 'istio: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m' WAR: istio: Test not supported in OpenShift + '[' true = true ']' + skip_test outside-cluster 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=outside-cluster + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf outside-cluster + warning 'outside-cluster: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m' WAR: outside-cluster: Test not supported in OpenShift + start_test set-custom-img + '[' 1 -ne 1 ']' + test_name=set-custom-img + echo =========================================================================== =========================================================================== + info 'Rendering files for test set-custom-img' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test set-custom-img\e[0m' Rendering files for test set-custom-img + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p set-custom-img + cd set-custom-img + jaeger_name=my-jaeger + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml + '[' true = true ']' + skip_test non-cluster-wide 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=non-cluster-wide + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img + '[' set-custom-img '!=' _build ']' + cd .. + rm -rf non-cluster-wide + warning 'non-cluster-wide: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m' WAR: non-cluster-wide: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running miscellaneous E2E tests' Running miscellaneous E2E tests + cd tests/e2e/miscellaneous/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3803946269 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
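As with the generate suite earlier, the run itself reduces to two commands: kuttl executed inside the rendered _build directory, then junitcli rewriting the report for the CI artifacts directory (paths as used in this job):

    # The pattern each suite follows in this job.
    KUBECONFIG=/tmp/kubeconfig-3803946269 kubectl-kuttl test --report xml
    junitcli --suite-name miscellaneous --report \
        --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml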
harness.go:275: Successful connection to cluster at: https://api.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 7 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 07:19:52 | artifacts | Creating namespace: kuttl-test-holy-airedale logger.go:42: 07:19:52 | artifacts | artifacts events from ns kuttl-test-holy-airedale: logger.go:42: 07:19:52 | artifacts | Deleting namespace: kuttl-test-holy-airedale === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 07:19:58 | collector-otlp-production-grpc | Creating namespace: kuttl-test-flying-gnu logger.go:42: 07:19:58 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 07:19:58 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-flying-gnu/my-jaeger created logger.go:42: 07:20:34 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 07:20:34 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:20:34 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:20:35 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
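The smoke test that follows renders two Jobs from otlp-smoke-test.yaml.template: report-span pushes spans to the collector over OTLP, and check-span polls the query endpoint until they are visible. kuttl asserts completion through its numbered assert files; expressed directly with kubectl, the same wait would look like:

    # Illustration only; the suite asserts this via kuttl, not kubectl wait.
    kubectl wait --for=condition=complete --timeout=600s job/report-span -n "$NAMESPACE"
    kubectl wait --for=condition=complete --timeout=600s job/check-span  -n "$NAMESPACE"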
logger.go:42: 07:20:42 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:20:42 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:20:43 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 07:20:43 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 07:21:02 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:21:02 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-flying-gnu: logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69 Binding Scheduled Successfully assigned kuttl-test-flying-gnu/elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:04 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69 replicaset-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:04 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestflyinggnumyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9 to 1 deployment-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69 AddedInterface Add eth0 [10.129.2.49/23] from ovn-kubernetes logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 
2023-12-18 07:20:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:15 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:20 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestflyinggnumyjaeger-1-5bd9649cd9-rkt69.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:31 +0000 UTC Normal Pod my-jaeger-collector-778797f5c9-dpm24 Binding Scheduled Successfully assigned kuttl-test-flying-gnu/my-jaeger-collector-778797f5c9-dpm24 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:31 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-778797f5c9 SuccessfulCreate Created pod: my-jaeger-collector-778797f5c9-dpm24 replicaset-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:31 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-778797f5c9 to 1 deployment-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:31 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b Binding Scheduled Successfully assigned kuttl-test-flying-gnu/my-jaeger-query-78bfbb76d6-zc45b to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:31 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-78bfbb76d6 SuccessfulCreate Created pod: my-jaeger-query-78bfbb76d6-zc45b replicaset-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:31 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-78bfbb76d6 to 1 deployment-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-collector-778797f5c9-dpm24 AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-collector-778797f5c9-dpm24.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-collector-778797f5c9-dpm24.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-collector-778797f5c9-dpm24.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b AddedInterface Add 
eth0 [10.131.0.75/23] from ovn-kubernetes logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:32 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:38 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:38 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:38 +0000 UTC Normal Pod my-jaeger-query-78bfbb76d6-zc45b.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:38 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-78bfbb76d6 SuccessfulDelete Deleted pod: my-jaeger-query-78bfbb76d6-zc45b replicaset-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:38 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-78bfbb76d6 to 0 from 1 deployment-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:39 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf Binding Scheduled Successfully 
assigned kuttl-test-flying-gnu/my-jaeger-query-598bf9cbbb-gqvdf to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:39 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-598bf9cbbb SuccessfulCreate Created pod: my-jaeger-query-598bf9cbbb-gqvdf replicaset-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:39 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-598bf9cbbb to 1 deployment-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf AddedInterface Add eth0 [10.131.0.76/23] from ovn-kubernetes logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:40 +0000 UTC Normal Pod my-jaeger-query-598bf9cbbb-gqvdf.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod check-span-shx6t Binding Scheduled Successfully assigned kuttl-test-flying-gnu/check-span-shx6t to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod check-span-shx6t AddedInterface Add eth0 [10.128.2.59/23] from ovn-kubernetes logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod 
check-span-shx6t.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod check-span-shx6t.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod check-span-shx6t.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-shx6t job-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod report-span-2z9vp Binding Scheduled Successfully assigned kuttl-test-flying-gnu/report-span-2z9vp to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod report-span-2z9vp AddedInterface Add eth0 [10.128.2.58/23] from ovn-kubernetes logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod report-span-2z9vp.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod report-span-2z9vp.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Pod report-span-2z9vp.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:43 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2z9vp job-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:20:46 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:21:02 | collector-otlp-production-grpc | 2023-12-18 07:21:02 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:21:02 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-flying-gnu === CONT kuttl/harness/set-custom-img logger.go:42: 07:21:15 | set-custom-img | Ignoring 
README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:21:15 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:21:15 | set-custom-img | Creating namespace: kuttl-test-moving-pangolin logger.go:42: 07:21:15 | set-custom-img/1-install | starting test step 1-install logger.go:42: 07:21:15 | set-custom-img/1-install | Jaeger:kuttl-test-moving-pangolin/my-jaeger created logger.go:42: 07:21:50 | set-custom-img/1-install | test step completed 1-install logger.go:42: 07:21:50 | set-custom-img/2-install | starting test step 2-install logger.go:42: 07:21:50 | set-custom-img/2-install | Jaeger:kuttl-test-moving-pangolin/my-jaeger updated logger.go:42: 07:21:50 | set-custom-img/2-install | test step completed 2-install logger.go:42: 07:21:50 | set-custom-img/3-check-image | starting test step 3-check-image logger.go:42: 07:21:50 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh] logger.go:42: 07:21:50 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9 logger.go:42: 07:21:56 | set-custom-img/3-check-image | Collector image asserted properly! logger.go:42: 07:21:56 | set-custom-img/3-check-image | test step completed 3-check-image logger.go:42: 07:21:56 | set-custom-img | set-custom-img events from ns kuttl-test-moving-pangolin: logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:21 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f45659c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h replicaset-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h Binding Scheduled Successfully assigned kuttl-test-moving-pangolin/elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:21 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f45659c to 1 deployment-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:22 +0000 UTC Normal Pod
elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:37 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmovingpangolinmyjaeger-1-797f457424h.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:48 +0000 UTC Normal Pod my-jaeger-collector-8c47644c8-s7h4n Binding Scheduled Successfully assigned kuttl-test-moving-pangolin/my-jaeger-collector-8c47644c8-s7h4n to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:48 +0000 UTC Warning Pod my-jaeger-collector-8c47644c8-s7h4n FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-8c47644c8 SuccessfulCreate Created pod: my-jaeger-collector-8c47644c8-s7h4n replicaset-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:48 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-8c47644c8 to 1 deployment-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:48 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv Binding Scheduled Successfully assigned kuttl-test-moving-pangolin/my-jaeger-query-6c85d8d98f-djvcv to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:48 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6c85d8d98f SuccessfulCreate Created pod: my-jaeger-query-6c85d8d98f-djvcv replicaset-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:48 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6c85d8d98f to 1 deployment-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-collector-8c47644c8-s7h4n AddedInterface Add eth0 [10.128.2.60/23] from ovn-kubernetes logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-collector-8c47644c8-s7h4n.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-collector-8c47644c8-s7h4n.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-collector-8c47644c8-s7h4n.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 
UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv AddedInterface Add eth0 [10.131.0.77/23] from ovn-kubernetes logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:49 +0000 UTC Normal Pod my-jaeger-query-6c85d8d98f-djvcv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:52 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-8c47644c8 SuccessfulDelete Deleted pod: my-jaeger-collector-8c47644c8-s7h4n replicaset-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:52 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-8c47644c8 to 0 from 1 deployment-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:53 +0000 UTC Normal Pod my-jaeger-collector-8c47644c8-s7h4n.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:54 +0000 UTC Normal Pod my-jaeger-collector-7c5b4bdcfb-qkqqd Binding Scheduled Successfully assigned kuttl-test-moving-pangolin/my-jaeger-collector-7c5b4bdcfb-qkqqd to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:54 +0000 UTC Normal Pod my-jaeger-collector-7c5b4bdcfb-qkqqd AddedInterface Add eth0 [10.128.2.61/23] from ovn-kubernetes logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:54 +0000 UTC Normal Pod my-jaeger-collector-7c5b4bdcfb-qkqqd.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet logger.go:42: 07:21:56 | set-custom-img 
| 2023-12-18 07:21:54 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7c5b4bdcfb SuccessfulCreate Created pod: my-jaeger-collector-7c5b4bdcfb-qkqqd replicaset-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:54 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-7c5b4bdcfb to 1 deployment-controller logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:55 +0000 UTC Warning Pod my-jaeger-collector-7c5b4bdcfb-qkqqd.spec.containers{jaeger-collector} Failed Failed to pull image "test": reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:55 +0000 UTC Warning Pod my-jaeger-collector-7c5b4bdcfb-qkqqd.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:55 +0000 UTC Normal Pod my-jaeger-collector-7c5b4bdcfb-qkqqd.spec.containers{jaeger-collector} BackOff Back-off pulling image "test" kubelet logger.go:42: 07:21:56 | set-custom-img | 2023-12-18 07:21:55 +0000 UTC Warning Pod my-jaeger-collector-7c5b4bdcfb-qkqqd.spec.containers{jaeger-collector} Failed Error: ImagePullBackOff kubelet logger.go:42: 07:21:56 | set-custom-img | Deleting namespace: kuttl-test-moving-pangolin === CONT kuttl/harness/collector-otlp-production-http logger.go:42: 07:22:03 | collector-otlp-production-http | Creating namespace: kuttl-test-premium-dogfish logger.go:42: 07:22:03 | collector-otlp-production-http/1-install | starting test step 1-install logger.go:42: 07:22:03 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-premium-dogfish/my-jaeger created logger.go:42: 07:22:39 | collector-otlp-production-http/1-install | test step completed 1-install logger.go:42: 07:22:39 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:22:39 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:22:41 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
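A note on the set-custom-img result above: the ErrImagePull / ImagePullBackOff events are expected, since image "test" does not exist; check-collector-img.sh only needs the operator to propagate the new image reference into the collector Deployment, retrying until the spec matches. A hypothetical equivalent of that lookup (the script's internals are not shown in the log):

    # Hypothetical sketch of the comparison check-collector-img.sh performs.
    have=$(kubectl get deployment my-jaeger-collector -n "$NAMESPACE" \
        -o jsonpath='{.spec.template.spec.containers[0].image}')
    [ "$have" = test ] && echo "Collector image asserted properly!"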
logger.go:42: 07:22:47 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:22:48 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:22:48 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created logger.go:42: 07:22:48 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created logger.go:42: 07:23:00 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:23:00 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-premium-dogfish: logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7 Binding Scheduled Successfully assigned kuttl-test-premium-dogfish/elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7 AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal ReplicaSet.apps 
elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bcdf5 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7 replicaset-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:09 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bcdf5 to 1 deployment-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:19 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:24 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpremiumdogfishmyjaeger-1-8f99bc6c6v7.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal Pod my-jaeger-collector-57cd998cd9-99k7w Binding Scheduled Successfully assigned kuttl-test-premium-dogfish/my-jaeger-collector-57cd998cd9-99k7w to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-57cd998cd9 SuccessfulCreate Created pod: my-jaeger-collector-57cd998cd9-99k7w replicaset-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-57cd998cd9 to 1 deployment-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf Binding Scheduled Successfully assigned kuttl-test-premium-dogfish/my-jaeger-query-595bd99944-zcdwf to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf AddedInterface Add eth0 [10.131.0.78/23] from ovn-kubernetes logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-595bd99944 SuccessfulCreate Created pod: my-jaeger-query-595bd99944-zcdwf 
replicaset-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:36 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-595bd99944 to 1 deployment-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:37 +0000 UTC Warning Pod my-jaeger-collector-57cd998cd9-99k7w FailedMount MountVolume.SetUp failed for volume "my-jaeger-sampling-configuration-volume" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:37 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:37 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:37 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:37 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:37 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:38 +0000 UTC Normal Pod my-jaeger-collector-57cd998cd9-99k7w AddedInterface Add eth0 [10.128.2.62/23] from ovn-kubernetes logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:38 +0000 UTC Normal Pod my-jaeger-collector-57cd998cd9-99k7w.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:38 +0000 UTC Normal Pod my-jaeger-collector-57cd998cd9-99k7w.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:38 +0000 UTC Normal Pod my-jaeger-collector-57cd998cd9-99k7w.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:42 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:42 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:42 +0000 UTC Normal Pod my-jaeger-query-595bd99944-zcdwf.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:42 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-595bd99944 SuccessfulDelete Deleted pod: my-jaeger-query-595bd99944-zcdwf 
replicaset-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:42 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-595bd99944 to 0 from 1 deployment-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:43 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg Binding Scheduled Successfully assigned kuttl-test-premium-dogfish/my-jaeger-query-5bd9f68764-bcfwg to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5bd9f68764 SuccessfulCreate Created pod: my-jaeger-query-5bd9f68764-bcfwg replicaset-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:43 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5bd9f68764 to 1 deployment-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg AddedInterface Add eth0 [10.131.0.79/23] from ovn-kubernetes logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:44 +0000 UTC Normal Pod my-jaeger-query-5bd9f68764-bcfwg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:48 +0000 UTC Normal Pod check-span-927h6 
Binding Scheduled Successfully assigned kuttl-test-premium-dogfish/check-span-927h6 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:48 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-927h6 job-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:48 +0000 UTC Normal Pod report-span-6fnfq Binding Scheduled Successfully assigned kuttl-test-premium-dogfish/report-span-6fnfq to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:48 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-6fnfq job-controller logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:49 +0000 UTC Normal Pod check-span-927h6 AddedInterface Add eth0 [10.128.2.64/23] from ovn-kubernetes logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:49 +0000 UTC Normal Pod check-span-927h6.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:49 +0000 UTC Normal Pod check-span-927h6.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:49 +0000 UTC Normal Pod check-span-927h6.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:49 +0000 UTC Normal Pod report-span-6fnfq AddedInterface Add eth0 [10.128.2.63/23] from ovn-kubernetes logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:49 +0000 UTC Normal Pod report-span-6fnfq.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:49 +0000 UTC Normal Pod report-span-6fnfq.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:49 +0000 UTC Normal Pod report-span-6fnfq.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:22:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no 
metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:23:00 | collector-otlp-production-http | 2023-12-18 07:23:00 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:23:00 | collector-otlp-production-http | Deleting namespace: kuttl-test-premium-dogfish === CONT kuttl/harness/collector-otlp-allinone-grpc logger.go:42: 07:23:14 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-direct-cow logger.go:42: 07:23:14 | collector-otlp-allinone-grpc/0-install | starting test step 0-install logger.go:42: 07:23:14 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-direct-cow/my-jaeger created logger.go:42: 07:23:20 | collector-otlp-allinone-grpc/0-install | test step completed 0-install logger.go:42: 07:23:20 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:23:20 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:23:21 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:23:28 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:23:28 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:23:29 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created logger.go:42: 07:23:29 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created logger.go:42: 07:23:48 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-direct-cow: logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4 Binding Scheduled Successfully assigned kuttl-test-direct-cow/my-jaeger-84f67975ff-q7mp4 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4 AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 
07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal ReplicaSet.apps my-jaeger-84f67975ff SuccessfulCreate Created pod: my-jaeger-84f67975ff-q7mp4 replicaset-controller logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:18 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-84f67975ff to 1 deployment-controller logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:23 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:23 +0000 UTC Normal Pod my-jaeger-84f67975ff-q7mp4.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:23 +0000 UTC Normal ReplicaSet.apps my-jaeger-84f67975ff SuccessfulDelete Deleted pod: my-jaeger-84f67975ff-q7mp4 replicaset-controller logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:23 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-84f67975ff to 0 from 1 deployment-controller logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal Pod my-jaeger-56888467cc-854x8 Binding Scheduled Successfully assigned kuttl-test-direct-cow/my-jaeger-56888467cc-854x8 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal Pod my-jaeger-56888467cc-854x8 AddedInterface Add eth0 [10.129.2.53/23] from ovn-kubernetes logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal Pod my-jaeger-56888467cc-854x8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal Pod my-jaeger-56888467cc-854x8.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal Pod my-jaeger-56888467cc-854x8.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal Pod my-jaeger-56888467cc-854x8.spec.containers{oauth-proxy} Pulled Container image 
"registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal Pod my-jaeger-56888467cc-854x8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal Pod my-jaeger-56888467cc-854x8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal ReplicaSet.apps my-jaeger-56888467cc SuccessfulCreate Created pod: my-jaeger-56888467cc-854x8 replicaset-controller logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:24 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-56888467cc to 1 deployment-controller logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod check-span-4qtpc Binding Scheduled Successfully assigned kuttl-test-direct-cow/check-span-4qtpc to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod check-span-4qtpc AddedInterface Add eth0 [10.128.2.65/23] from ovn-kubernetes logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod check-span-4qtpc.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod check-span-4qtpc.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod check-span-4qtpc.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-4qtpc job-controller logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod report-span-r2cxs Binding Scheduled Successfully assigned kuttl-test-direct-cow/report-span-r2cxs to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod report-span-r2cxs AddedInterface Add eth0 [10.131.0.80/23] from ovn-kubernetes logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod report-span-r2cxs.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod report-span-r2cxs.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Pod report-span-r2cxs.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:29 +0000 UTC Normal Job.batch report-span 
SuccessfulCreate Created pod: report-span-r2cxs job-controller logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | 2023-12-18 07:23:47 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:23:48 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-direct-cow === CONT kuttl/harness/collector-otlp-allinone-http logger.go:42: 07:24:01 | collector-otlp-allinone-http | Creating namespace: kuttl-test-divine-snipe logger.go:42: 07:24:01 | collector-otlp-allinone-http/0-install | starting test step 0-install logger.go:42: 07:24:01 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-divine-snipe/my-jaeger created logger.go:42: 07:24:07 | collector-otlp-allinone-http/0-install | test step completed 0-install logger.go:42: 07:24:07 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:24:07 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:24:08 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:24:16 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:24:17 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:24:17 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created logger.go:42: 07:24:17 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created logger.go:42: 07:24:29 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:24:29 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-divine-snipe: logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:04 +0000 UTC Normal Pod my-jaeger-5fb7ff9d95-7zr9t Binding Scheduled Successfully assigned kuttl-test-divine-snipe/my-jaeger-5fb7ff9d95-7zr9t to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:04 +0000 UTC Normal ReplicaSet.apps my-jaeger-5fb7ff9d95 SuccessfulCreate Created pod: my-jaeger-5fb7ff9d95-7zr9t replicaset-controller logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:04 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-5fb7ff9d95 to 1 deployment-controller logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:05 +0000 UTC Normal Pod my-jaeger-5fb7ff9d95-7zr9t AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:05 +0000 UTC Normal Pod 
my-jaeger-5fb7ff9d95-7zr9t.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:05 +0000 UTC Normal Pod my-jaeger-5fb7ff9d95-7zr9t.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:05 +0000 UTC Normal Pod my-jaeger-5fb7ff9d95-7zr9t.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:05 +0000 UTC Normal Pod my-jaeger-5fb7ff9d95-7zr9t.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:05 +0000 UTC Normal Pod my-jaeger-5fb7ff9d95-7zr9t.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:05 +0000 UTC Normal Pod my-jaeger-5fb7ff9d95-7zr9t.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:10 +0000 UTC Normal Pod my-jaeger-5fb7ff9d95-7zr9t.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:10 +0000 UTC Normal Pod my-jaeger-5fb7ff9d95-7zr9t.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:10 +0000 UTC Normal ReplicaSet.apps my-jaeger-5fb7ff9d95 SuccessfulDelete Deleted pod: my-jaeger-5fb7ff9d95-7zr9t replicaset-controller logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:10 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-5fb7ff9d95 to 0 from 1 deployment-controller logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal Pod my-jaeger-c8f757c55-lxxhd Binding Scheduled Successfully assigned kuttl-test-divine-snipe/my-jaeger-c8f757c55-lxxhd to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal Pod my-jaeger-c8f757c55-lxxhd AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal Pod my-jaeger-c8f757c55-lxxhd.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal Pod my-jaeger-c8f757c55-lxxhd.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal Pod my-jaeger-c8f757c55-lxxhd.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal Pod my-jaeger-c8f757c55-lxxhd.spec.containers{oauth-proxy} Pulled Container image 
"registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal Pod my-jaeger-c8f757c55-lxxhd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal Pod my-jaeger-c8f757c55-lxxhd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal ReplicaSet.apps my-jaeger-c8f757c55 SuccessfulCreate Created pod: my-jaeger-c8f757c55-lxxhd replicaset-controller logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:11 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-c8f757c55 to 1 deployment-controller logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:17 +0000 UTC Normal Pod check-span-hphrl Binding Scheduled Successfully assigned kuttl-test-divine-snipe/check-span-hphrl to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-hphrl job-controller logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:17 +0000 UTC Normal Pod report-span-x2c4n Binding Scheduled Successfully assigned kuttl-test-divine-snipe/report-span-x2c4n to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:17 +0000 UTC Normal Pod report-span-x2c4n AddedInterface Add eth0 [10.131.0.81/23] from ovn-kubernetes logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:17 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-x2c4n job-controller logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:18 +0000 UTC Normal Pod check-span-hphrl AddedInterface Add eth0 [10.128.2.66/23] from ovn-kubernetes logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:18 +0000 UTC Normal Pod check-span-hphrl.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:18 +0000 UTC Normal Pod check-span-hphrl.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:18 +0000 UTC Normal Pod check-span-hphrl.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:18 +0000 UTC Normal Pod report-span-x2c4n.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:18 +0000 UTC Normal Pod report-span-x2c4n.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:18 +0000 UTC Normal Pod 
report-span-x2c4n.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:24:29 | collector-otlp-allinone-http | 2023-12-18 07:24:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:24:29 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-divine-snipe === CONT kuttl/harness/collector-autoscale logger.go:42: 07:24:42 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:24:42 | collector-autoscale | Creating namespace: kuttl-test-included-marten logger.go:42: 07:24:42 | collector-autoscale/1-install | starting test step 1-install logger.go:42: 07:24:42 | collector-autoscale/1-install | Jaeger:kuttl-test-included-marten/simple-prod created logger.go:42: 07:25:19 | collector-autoscale/1-install | test step completed 1-install logger.go:42: 07:25:19 | collector-autoscale/2- | starting test step 2- logger.go:42: 07:25:19 | collector-autoscale/2- | test step completed 2- logger.go:42: 07:25:19 | collector-autoscale | collector-autoscale events from ns kuttl-test-included-marten: logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:48 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79dd96578f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm replicaset-controller logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:48 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm Binding Scheduled Successfully assigned kuttl-test-included-marten/elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:48 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:48 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestincludedmartensimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79dd96578f to 1 deployment-controller logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm AddedInterface Add eth0 [10.129.2.56/23] from ovn-kubernetes logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm.spec.containers{proxy} 
Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:24:59 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:04 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestincludedmartensimpleprod-1-79ddxs6jm.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal Pod simple-prod-collector-565bb44547-kmvbj Binding Scheduled Successfully assigned kuttl-test-included-marten/simple-prod-collector-565bb44547-kmvbj to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal Pod simple-prod-collector-565bb44547-kmvbj AddedInterface Add eth0 [10.128.2.67/23] from ovn-kubernetes logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal Pod simple-prod-collector-565bb44547-kmvbj.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal Pod simple-prod-collector-565bb44547-kmvbj.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal Pod simple-prod-collector-565bb44547-kmvbj.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-565bb44547 SuccessfulCreate Created pod: simple-prod-collector-565bb44547-kmvbj replicaset-controller logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-565bb44547 to 1 deployment-controller logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7 Binding Scheduled Successfully assigned kuttl-test-included-marten/simple-prod-query-9b9b867bd-m5mf7 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Warning Pod simple-prod-query-9b9b867bd-m5mf7 FailedMount MountVolume.SetUp failed for volume "simple-prod-ui-oauth-proxy-tls" : secret "simple-prod-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal ReplicaSet.apps simple-prod-query-9b9b867bd SuccessfulCreate Created pod: 
simple-prod-query-9b9b867bd-m5mf7 replicaset-controller
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:15 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-9b9b867bd to 1 deployment-controller
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7 AddedInterface Add eth0 [10.131.0.82/23] from ovn-kubernetes
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:25:19 | collector-autoscale | 2023-12-18 07:25:16 +0000 UTC Normal Pod simple-prod-query-9b9b867bd-m5mf7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:25:19 | collector-autoscale | Deleting namespace: kuttl-test-included-marten
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (333.24s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (6.25s)
--- PASS: kuttl/harness/collector-otlp-production-grpc (76.40s)
--- PASS: kuttl/harness/set-custom-img (47.99s)
--- PASS: kuttl/harness/collector-otlp-production-http (71.10s)
--- PASS: kuttl/harness/collector-otlp-allinone-grpc (46.77s)
--- PASS: kuttl/harness/collector-otlp-allinone-http (41.28s)
--- PASS: kuttl/harness/collector-autoscale (43.41s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml
time="2023-12-18T07:25:26Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-18T07:25:26Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-18T07:25:26Z" level=debug msg="normalizing test case names"
time="2023-12-18T07:25:26Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts"
time="2023-12-18T07:25:26Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc"
time="2023-12-18T07:25:26Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img"
time="2023-12-18T07:25:26Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http"
time="2023-12-18T07:25:26Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc"
time="2023-12-18T07:25:26Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http"
time="2023-12-18T07:25:26Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale"
+----------------------------------------------+--------+
|                     NAME                     | RESULT |
+----------------------------------------------+--------+
| miscellaneous_artifacts                      | passed |
| miscellaneous_collector_otlp_production_grpc | passed |
| miscellaneous_set_custom_img                 | passed |
| miscellaneous_collector_otlp_production_http | passed |
| miscellaneous_collector_otlp_allinone_grpc   | passed |
| miscellaneous_collector_otlp_allinone_http   | passed |
| miscellaneous_collector_autoscale            | passed |
+----------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=sidecar
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/sidecar.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-sidecar
make[2]: Entering directory '/tmp/jaeger-tests'
./tests/e2e/sidecar/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 47m Cluster version is 4.15.0-0.nightly-2023-12-17-173511'
++ IS_OPENSHIFT=false
++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 47m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/sidecar/render.sh ++ export SUITE_DIR=./tests/e2e/sidecar ++ SUITE_DIR=./tests/e2e/sidecar ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/sidecar ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + jaeger_service_name=order + start_test sidecar-deployment + '[' 1 -ne 1 ']' + test_name=sidecar-deployment + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-deployment' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m' Rendering files for test sidecar-deployment + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build + '[' _build '!=' _build ']' + mkdir -p sidecar-deployment + cd sidecar-deployment + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-namespace + '[' 1 -ne 1 ']' + test_name=sidecar-namespace + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-namespace' + '[' 1 -ne 1 ']' + 
echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m' Rendering files for test sidecar-namespace + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment + '[' sidecar-deployment '!=' _build ']' + cd .. + mkdir -p sidecar-namespace + cd sidecar-namespace + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-skip-webhook + '[' 1 -ne 1 ']' + test_name=sidecar-skip-webhook + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-skip-webhook' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m' Rendering files for test sidecar-skip-webhook + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace + '[' sidecar-namespace '!=' _build ']' + cd .. 
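The render_find_service calls above export JOB_NUMBER, SERVICE_NAME, and JAEGER_QUERY_ENDPOINT and then feed find-service.yaml.template through gomplate to produce numbered steps such as 03-find-service.yaml. A rough sketch of how such a template could substitute those variables; the gomplate function env.Getenv is real, but the template body and placeholder image here are assumptions for illustration:

apiVersion: batch/v1
kind: Job
metadata:
  # yields 00-find-service, 01-find-service, matching the Jobs created later in this log
  name: {{ env.Getenv "JOB_NUMBER" }}-find-service
spec:
  template:
    spec:
      restartPolicy: Never
      containers:
        - name: find-service
          image: "<test-utils-image>"   # hypothetical placeholder
          env:
            - name: SERVICE_NAME        # "order" in this suite
              value: {{ env.Getenv "SERVICE_NAME" }}
            - name: JAEGER_QUERY_ENDPOINT
              value: {{ env.Getenv "JAEGER_QUERY_ENDPOINT" }}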
+ mkdir -p sidecar-skip-webhook + cd sidecar-skip-webhook + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running sidecar E2E tests' Running sidecar E2E tests + cd tests/e2e/sidecar/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3803946269 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/sidecar-deployment === PAUSE kuttl/harness/sidecar-deployment === RUN kuttl/harness/sidecar-namespace === PAUSE kuttl/harness/sidecar-namespace === RUN kuttl/harness/sidecar-skip-webhook === PAUSE kuttl/harness/sidecar-skip-webhook === CONT kuttl/harness/artifacts logger.go:42: 07:25:34 | artifacts | Creating namespace: kuttl-test-comic-catfish logger.go:42: 07:25:34 | artifacts | artifacts events from ns kuttl-test-comic-catfish: logger.go:42: 07:25:34 | artifacts | Deleting namespace: kuttl-test-comic-catfish === CONT kuttl/harness/sidecar-namespace logger.go:42: 07:25:40 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:25:40 | sidecar-namespace | Creating namespace: kuttl-test-internal-ewe logger.go:42: 07:25:40 | sidecar-namespace/0-install | starting test step 0-install logger.go:42: 07:25:40 | sidecar-namespace/0-install | Jaeger:kuttl-test-internal-ewe/agent-as-sidecar created logger.go:42: 07:25:46 | sidecar-namespace/0-install | test step completed 0-install logger.go:42: 07:25:46 | sidecar-namespace/1-install | starting test step 1-install logger.go:42: 07:25:46 | sidecar-namespace/1-install | Deployment:kuttl-test-internal-ewe/vertx-create-span-sidecar created logger.go:42: 07:25:47 | sidecar-namespace/1-install | test step completed 1-install logger.go:42: 07:25:47 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection logger.go:42: 07:25:47 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"] logger.go:42: 07:25:47 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-internal-ewe annotate logger.go:42: 07:25:53 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection logger.go:42: 07:25:53 | sidecar-namespace/3-find-service | starting test step 3-find-service logger.go:42: 07:25:53 | sidecar-namespace/3-find-service | Job:kuttl-test-internal-ewe/00-find-service created logger.go:42: 07:26:06 | sidecar-namespace/3-find-service | test step completed 3-find-service logger.go:42: 07:26:06 | sidecar-namespace/4-other-instance | starting test step 4-other-instance logger.go:42: 07:26:06 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-internal-ewe/agent-as-sidecar2 created logger.go:42: 07:26:15 | sidecar-namespace/4-other-instance | test step completed 
4-other-instance logger.go:42: 07:26:15 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 07:26:16 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 07:26:16 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 07:26:16 | sidecar-namespace/6-find-service | Job:kuttl-test-internal-ewe/01-find-service created logger.go:42: 07:26:35 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 07:26:35 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 07:26:35 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 07:26:35 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-internal-ewe annotate logger.go:42: 07:26:40 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 07:26:40 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-internal-ewe: logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:43 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-6d675ffb4b to 1 deployment-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:44 +0000 UTC Normal Pod agent-as-sidecar-6d675ffb4b-rbw2d Binding Scheduled Successfully assigned kuttl-test-internal-ewe/agent-as-sidecar-6d675ffb4b-rbw2d to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:44 +0000 UTC Normal Pod agent-as-sidecar-6d675ffb4b-rbw2d AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:44 +0000 UTC Normal Pod agent-as-sidecar-6d675ffb4b-rbw2d.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:44 +0000 UTC Normal Pod agent-as-sidecar-6d675ffb4b-rbw2d.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:44 +0000 UTC Normal Pod agent-as-sidecar-6d675ffb4b-rbw2d.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:44 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-6d675ffb4b SuccessfulCreate Created pod: agent-as-sidecar-6d675ffb4b-rbw2d replicaset-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:46 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-k4rfv Binding Scheduled Successfully assigned kuttl-test-internal-ewe/vertx-create-span-sidecar-84d458b68c-k4rfv to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:46 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-k4rfv replicaset-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:46 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:47 +0000 UTC Normal Pod 
vertx-create-span-sidecar-84d458b68c-k4rfv AddedInterface Add eth0 [10.129.2.58/23] from ovn-kubernetes logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:47 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-k4rfv.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:47 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-k4rfv.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:47 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-k4rfv.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:47 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9 Binding Scheduled Successfully assigned kuttl-test-internal-ewe/vertx-create-span-sidecar-cb75d4656-2llb9 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:47 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-cb75d4656 SuccessfulCreate Created pod: vertx-create-span-sidecar-cb75d4656-2llb9 replicaset-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:47 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-cb75d4656 to 1 deployment-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:48 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9 AddedInterface Add eth0 [10.128.2.68/23] from ovn-kubernetes logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:48 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:51 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.445s (3.445s including waiting) kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:51 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:51 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:51 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:52 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:52 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 
07:26:40 | sidecar-namespace | 2023-12-18 07:25:53 +0000 UTC Normal Pod 00-find-service-zdhhx Binding Scheduled Successfully assigned kuttl-test-internal-ewe/00-find-service-zdhhx to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:53 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-zdhhx job-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:54 +0000 UTC Normal Pod 00-find-service-zdhhx AddedInterface Add eth0 [10.131.0.83/23] from ovn-kubernetes logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:54 +0000 UTC Normal Pod 00-find-service-zdhhx.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:54 +0000 UTC Normal Pod 00-find-service-zdhhx.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:54 +0000 UTC Normal Pod 00-find-service-zdhhx.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:56 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-k4rfv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.58:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:56 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-k4rfv.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.58:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:58 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-k4rfv.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:58 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-k4rfv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.58:8080/": read tcp 10.129.2.2:56578->10.129.2.58:8080: read: connection reset by peer kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:58 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-k4rfv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.58:8080/": dial tcp 10.129.2.58:8080: connect: connection refused kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:59 +0000 UTC Warning Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.68:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:25:59 +0000 UTC Warning Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.68:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:01 +0000 UTC Normal 
Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:01 +0000 UTC Warning Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.68:8080/": read tcp 10.128.2.2:44766->10.128.2.68:8080: read: connection reset by peer kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:01 +0000 UTC Warning Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.68:8080/": dial tcp 10.128.2.68:8080: connect: connection refused kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:01 +0000 UTC Normal Pod vertx-create-span-sidecar-cb75d4656-2llb9.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:05 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:09 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-k4rfv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.58:8080/": read tcp 10.129.2.2:34178->10.129.2.58:8080: read: connection reset by peer kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:12 +0000 UTC Normal Pod agent-as-sidecar2-69dbf88dc4-kn97v Binding Scheduled Successfully assigned kuttl-test-internal-ewe/agent-as-sidecar2-69dbf88dc4-kn97v to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:12 +0000 UTC Normal Pod agent-as-sidecar2-69dbf88dc4-kn97v AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:12 +0000 UTC Normal Pod agent-as-sidecar2-69dbf88dc4-kn97v.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:12 +0000 UTC Normal Pod agent-as-sidecar2-69dbf88dc4-kn97v.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:12 +0000 UTC Normal Pod agent-as-sidecar2-69dbf88dc4-kn97v.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:12 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-69dbf88dc4 SuccessfulCreate Created pod: agent-as-sidecar2-69dbf88dc4-kn97v replicaset-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:12 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-69dbf88dc4 to 1 deployment-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:15 +0000 UTC Normal Pod agent-as-sidecar-6d675ffb4b-rbw2d.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:16 +0000 UTC Normal Pod 01-find-service-jbwd8 Binding Scheduled Successfully assigned kuttl-test-internal-ewe/01-find-service-jbwd8 to ip-10-0-98-211.ec2.internal 
default-scheduler logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:16 +0000 UTC Normal Pod 01-find-service-jbwd8 AddedInterface Add eth0 [10.131.0.84/23] from ovn-kubernetes logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:16 +0000 UTC Normal Pod 01-find-service-jbwd8.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:16 +0000 UTC Normal Pod 01-find-service-jbwd8.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:16 +0000 UTC Normal Pod 01-find-service-jbwd8.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:16 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-jbwd8 job-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg Binding Scheduled Successfully assigned kuttl-test-internal-ewe/vertx-create-span-sidecar-7b85f6bbcd-8tvrg to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7b85f6bbcd SuccessfulCreate Created pod: vertx-create-span-sidecar-7b85f6bbcd-8tvrg replicaset-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-k4rfv replicaset-controller logger.go:42: 07:26:40 | 
sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:20 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7b85f6bbcd to 1 from 0 deployment-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:28 +0000 UTC Warning Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.60:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:28 +0000 UTC Warning Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.60:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:30 +0000 UTC Normal Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:31 +0000 UTC Warning Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.60:8080/": read tcp 10.129.2.2:42742->10.129.2.60:8080: read: connection reset by peer kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:31 +0000 UTC Warning Pod vertx-create-span-sidecar-7b85f6bbcd-8tvrg.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.60:8080/": dial tcp 10.129.2.60:8080: connect: connection refused kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:35 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:35 +0000 UTC Normal Pod vertx-create-span-sidecar-5b9947c7c5-gb2c5 Binding Scheduled Successfully assigned kuttl-test-internal-ewe/vertx-create-span-sidecar-5b9947c7c5-gb2c5 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:35 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5b9947c7c5 SuccessfulCreate Created pod: vertx-create-span-sidecar-5b9947c7c5-gb2c5 replicaset-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:35 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-cb75d4656 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-cb75d4656-2llb9 replicaset-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:35 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-cb75d4656 to 0 from 1 deployment-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:35 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-5b9947c7c5 to 1 from 0 deployment-controller logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:36 +0000 UTC Normal Pod vertx-create-span-sidecar-5b9947c7c5-gb2c5 AddedInterface Add eth0 [10.131.0.85/23] from ovn-kubernetes logger.go:42: 07:26:40 
| sidecar-namespace | 2023-12-18 07:26:36 +0000 UTC Normal Pod vertx-create-span-sidecar-5b9947c7c5-gb2c5.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:39 +0000 UTC Normal Pod vertx-create-span-sidecar-5b9947c7c5-gb2c5.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.412s (3.412s including waiting) kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:39 +0000 UTC Normal Pod vertx-create-span-sidecar-5b9947c7c5-gb2c5.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:26:40 | sidecar-namespace | 2023-12-18 07:26:39 +0000 UTC Normal Pod vertx-create-span-sidecar-5b9947c7c5-gb2c5.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:26:40 | sidecar-namespace | Deleting namespace: kuttl-test-internal-ewe === CONT kuttl/harness/sidecar-skip-webhook logger.go:42: 07:26:47 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:26:47 | sidecar-skip-webhook | Creating namespace: kuttl-test-useful-pelican logger.go:42: 07:26:47 | sidecar-skip-webhook/0-install | starting test step 0-install logger.go:42: 07:26:48 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-useful-pelican/agent-as-sidecar created logger.go:42: 07:26:54 | sidecar-skip-webhook/0-install | test step completed 0-install logger.go:42: 07:26:54 | sidecar-skip-webhook/1-install | starting test step 1-install logger.go:42: 07:26:54 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-useful-pelican/vertx-create-span-sidecar created logger.go:42: 07:26:56 | sidecar-skip-webhook/1-install | test step completed 1-install logger.go:42: 07:26:56 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label logger.go:42: 07:26:56 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-useful-pelican] logger.go:42: 07:26:56 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled logger.go:42: 07:26:56 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-useful-pelican] logger.go:42: 07:26:56 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 07:26:56 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label logger.go:42: 07:26:56 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label logger.go:42: 07:26:56 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-useful-pelican] logger.go:42: 07:26:56 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled logger.go:42: 07:26:57 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label logger.go:42: 07:26:57 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-useful-pelican: logger.go:42: 07:26:57 | 
sidecar-skip-webhook | 2023-12-18 07:26:51 +0000 UTC Normal Pod agent-as-sidecar-7488776788-jrnn6 Binding Scheduled Successfully assigned kuttl-test-useful-pelican/agent-as-sidecar-7488776788-jrnn6 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:51 +0000 UTC Normal Pod agent-as-sidecar-7488776788-jrnn6 AddedInterface Add eth0 [10.129.2.61/23] from ovn-kubernetes logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:51 +0000 UTC Normal Pod agent-as-sidecar-7488776788-jrnn6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:51 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-7488776788 SuccessfulCreate Created pod: agent-as-sidecar-7488776788-jrnn6 replicaset-controller logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:51 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-7488776788 to 1 deployment-controller logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:52 +0000 UTC Normal Pod agent-as-sidecar-7488776788-jrnn6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:52 +0000 UTC Normal Pod agent-as-sidecar-7488776788-jrnn6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:54 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-tsmr5 Binding Scheduled Successfully assigned kuttl-test-useful-pelican/vertx-create-span-sidecar-84d458b68c-tsmr5 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:54 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-tsmr5 AddedInterface Add eth0 [10.129.2.62/23] from ovn-kubernetes logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:54 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-tsmr5.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:54 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-tsmr5.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:54 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-tsmr5.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:54 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-tsmr5 replicaset-controller logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:54 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:56 +0000 UTC Normal Pod vertx-create-span-sidecar-55454b7978-9n8gl Binding Scheduled Successfully assigned kuttl-test-useful-pelican/vertx-create-span-sidecar-55454b7978-9n8gl to ip-10-0-0-85.ec2.internal default-scheduler 
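Note: the sidecar-skip-webhook steps logged above reduce to the three kubectl commands below. Labeling the target deployment app.kubernetes.io/name=jaeger-operator makes the injection webhook treat it as the operator itself and skip it, so step 2 expects no jaeger-agent container despite the inject annotation; removing the label in step 3 lets injection proceed (visible in the 07:26:57 jaeger-agent events that follow). Commands copied from the log; the namespace name is generated per run:

  NS=kuttl-test-useful-pelican   # per-run namespace created by kuttl
  kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace "$NS"
  kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace "$NS"
  kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace "$NS"   # trailing '-' removes the label
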
logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:56 +0000 UTC Normal Pod vertx-create-span-sidecar-55454b7978-9n8gl AddedInterface Add eth0 [10.128.2.69/23] from ovn-kubernetes logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:56 +0000 UTC Normal Pod vertx-create-span-sidecar-55454b7978-9n8gl.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:56 +0000 UTC Normal Pod vertx-create-span-sidecar-55454b7978-9n8gl.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:56 +0000 UTC Normal Pod vertx-create-span-sidecar-55454b7978-9n8gl.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:56 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-55454b7978 SuccessfulCreate Created pod: vertx-create-span-sidecar-55454b7978-9n8gl replicaset-controller logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:56 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-55454b7978 to 1 deployment-controller logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:57 +0000 UTC Normal Pod vertx-create-span-sidecar-55454b7978-9n8gl.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:57 +0000 UTC Normal Pod vertx-create-span-sidecar-55454b7978-9n8gl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | 2023-12-18 07:26:57 +0000 UTC Normal Pod vertx-create-span-sidecar-55454b7978-9n8gl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:26:57 | sidecar-skip-webhook | Deleting namespace: kuttl-test-useful-pelican === CONT kuttl/harness/sidecar-deployment logger.go:42: 07:27:04 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:27:04 | sidecar-deployment | Creating namespace: kuttl-test-improved-molly logger.go:42: 07:27:04 | sidecar-deployment/0-install | starting test step 0-install logger.go:42: 07:27:04 | sidecar-deployment/0-install | Jaeger:kuttl-test-improved-molly/agent-as-sidecar created logger.go:42: 07:27:11 | sidecar-deployment/0-install | test step completed 0-install logger.go:42: 07:27:11 | sidecar-deployment/1-install | starting test step 1-install logger.go:42: 07:27:11 | sidecar-deployment/1-install | Deployment:kuttl-test-improved-molly/vertx-create-span-sidecar created logger.go:42: 07:27:13 | sidecar-deployment/1-install | test step completed 1-install logger.go:42: 07:27:13 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection logger.go:42: 07:27:13 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-improved-molly] logger.go:42: 07:27:13 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar 
annotate logger.go:42: 07:27:15 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection logger.go:42: 07:27:15 | sidecar-deployment/3-find-service | starting test step 3-find-service logger.go:42: 07:27:15 | sidecar-deployment/3-find-service | Job:kuttl-test-improved-molly/00-find-service created logger.go:42: 07:27:27 | sidecar-deployment/3-find-service | test step completed 3-find-service logger.go:42: 07:27:27 | sidecar-deployment/4-other-instance | starting test step 4-other-instance logger.go:42: 07:27:27 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-improved-molly/agent-as-sidecar2 created logger.go:42: 07:27:34 | sidecar-deployment/4-other-instance | test step completed 4-other-instance logger.go:42: 07:27:34 | sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 07:27:35 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 07:27:35 | sidecar-deployment/6-find-service | starting test step 6-find-service logger.go:42: 07:27:35 | sidecar-deployment/6-find-service | Job:kuttl-test-improved-molly/01-find-service created logger.go:42: 07:27:47 | sidecar-deployment/6-find-service | test step completed 6-find-service logger.go:42: 07:27:47 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection logger.go:42: 07:27:47 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-improved-molly] logger.go:42: 07:27:47 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 07:27:49 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection logger.go:42: 07:27:49 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-improved-molly: logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:07 +0000 UTC Normal Pod agent-as-sidecar-6c89646dfc-vsvrq Binding Scheduled Successfully assigned kuttl-test-improved-molly/agent-as-sidecar-6c89646dfc-vsvrq to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:07 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-6c89646dfc SuccessfulCreate Created pod: agent-as-sidecar-6c89646dfc-vsvrq replicaset-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:07 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-6c89646dfc to 1 deployment-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:08 +0000 UTC Normal Pod agent-as-sidecar-6c89646dfc-vsvrq AddedInterface Add eth0 [10.129.2.63/23] from ovn-kubernetes logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:08 +0000 UTC Normal Pod agent-as-sidecar-6c89646dfc-vsvrq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:08 +0000 UTC Normal Pod agent-as-sidecar-6c89646dfc-vsvrq.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:08 +0000 UTC Normal Pod agent-as-sidecar-6c89646dfc-vsvrq.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:27:49 | sidecar-deployment 
| 2023-12-18 07:27:11 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-28hq9 Binding Scheduled Successfully assigned kuttl-test-improved-molly/vertx-create-span-sidecar-84d458b68c-28hq9 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:11 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-28hq9 replicaset-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:11 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:12 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-28hq9 AddedInterface Add eth0 [10.129.2.64/23] from ovn-kubernetes logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:12 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-28hq9.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:12 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-28hq9.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:12 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-28hq9.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:13 +0000 UTC Normal Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh Binding Scheduled Successfully assigned kuttl-test-improved-molly/vertx-create-span-sidecar-7d56dc77ff-2s4kh to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7d56dc77ff SuccessfulCreate Created pod: vertx-create-span-sidecar-7d56dc77ff-2s4kh replicaset-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7d56dc77ff to 1 deployment-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh AddedInterface Add eth0 [10.128.2.70/23] from ovn-kubernetes logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{jaeger-agent} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:14 +0000 UTC Normal Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:15 +0000 UTC Normal Pod 00-find-service-r4qvm Binding Scheduled Successfully assigned kuttl-test-improved-molly/00-find-service-r4qvm to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:15 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-r4qvm job-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:16 +0000 UTC Normal Pod 00-find-service-r4qvm AddedInterface Add eth0 [10.131.0.86/23] from ovn-kubernetes logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:16 +0000 UTC Normal Pod 00-find-service-r4qvm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:16 +0000 UTC Normal Pod 00-find-service-r4qvm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:16 +0000 UTC Normal Pod 00-find-service-r4qvm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:20 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-28hq9.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.64:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:20 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-28hq9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.64:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:22 +0000 UTC Warning Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.70:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:22 +0000 UTC Warning Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.70:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:22 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-28hq9.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:22 +0000 UTC Warning Pod 
vertx-create-span-sidecar-84d458b68c-28hq9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.64:8080/": read tcp 10.129.2.2:43598->10.129.2.64:8080: read: connection reset by peer kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:22 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-28hq9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.64:8080/": dial tcp 10.129.2.64:8080: connect: connection refused kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:24 +0000 UTC Normal Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:24 +0000 UTC Warning Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.70:8080/": read tcp 10.128.2.2:36300->10.128.2.70:8080: read: connection reset by peer kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:24 +0000 UTC Warning Pod vertx-create-span-sidecar-7d56dc77ff-2s4kh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.70:8080/": dial tcp 10.128.2.70:8080: connect: connection refused kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:27 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:31 +0000 UTC Normal Pod agent-as-sidecar2-6444c674c-4cjks Binding Scheduled Successfully assigned kuttl-test-improved-molly/agent-as-sidecar2-6444c674c-4cjks to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:31 +0000 UTC Normal Pod agent-as-sidecar2-6444c674c-4cjks AddedInterface Add eth0 [10.129.2.65/23] from ovn-kubernetes logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:31 +0000 UTC Normal Pod agent-as-sidecar2-6444c674c-4cjks.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:31 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-6444c674c SuccessfulCreate Created pod: agent-as-sidecar2-6444c674c-4cjks replicaset-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:31 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-6444c674c to 1 deployment-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:32 +0000 UTC Normal Pod agent-as-sidecar2-6444c674c-4cjks.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:32 +0000 UTC Normal Pod agent-as-sidecar2-6444c674c-4cjks.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:32 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-28hq9.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.64:8080/": read tcp 10.129.2.2:59060->10.129.2.64:8080: read: connection reset by peer kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:34 +0000 
UTC Normal Pod agent-as-sidecar-6c89646dfc-vsvrq.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod 01-find-service-bdpwh Binding Scheduled Successfully assigned kuttl-test-improved-molly/01-find-service-bdpwh to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod 01-find-service-bdpwh AddedInterface Add eth0 [10.129.2.66/23] from ovn-kubernetes logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod 01-find-service-bdpwh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-grbb5z6p/pipeline@sha256:20698eb8082cd8170a3f9b90a036900700253097fdd02f2ad2350175b143fa5d" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod 01-find-service-bdpwh.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod 01-find-service-bdpwh.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-bdpwh job-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-5948d5b76f-xdbtl Binding Scheduled Successfully assigned kuttl-test-improved-molly/vertx-create-span-sidecar-5948d5b76f-xdbtl to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-5948d5b76f-xdbtl AddedInterface Add eth0 [10.131.0.87/23] from ovn-kubernetes logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5948d5b76f 
SuccessfulCreate Created pod: vertx-create-span-sidecar-5948d5b76f-xdbtl replicaset-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-28hq9 replicaset-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:35 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-5948d5b76f to 1 from 0 deployment-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:43 +0000 UTC Warning Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.87:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:43 +0000 UTC Warning Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.87:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:45 +0000 UTC Normal Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:46 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:46 +0000 UTC Warning Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.87:8080/": read tcp 10.131.0.2:32936->10.131.0.87:8080: read: connection reset by peer kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:46 +0000 UTC Warning Pod vertx-create-span-sidecar-5948d5b76f-xdbtl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.87:8080/": dial tcp 10.131.0.87:8080: connect: connection refused kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:47 +0000 UTC Normal Pod vertx-create-span-sidecar-7648855744-4vnn5 Binding Scheduled Successfully assigned kuttl-test-improved-molly/vertx-create-span-sidecar-7648855744-4vnn5 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:47 +0000 UTC Normal Pod vertx-create-span-sidecar-7648855744-4vnn5 AddedInterface Add eth0 [10.129.2.67/23] from ovn-kubernetes logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:47 +0000 UTC Normal Pod vertx-create-span-sidecar-7648855744-4vnn5.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:47 +0000 UTC Normal Pod vertx-create-span-sidecar-7648855744-4vnn5.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:47 +0000 
UTC Normal Pod vertx-create-span-sidecar-7648855744-4vnn5.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:47 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7648855744 SuccessfulCreate Created pod: vertx-create-span-sidecar-7648855744-4vnn5 replicaset-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:47 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7d56dc77ff SuccessfulDelete Deleted pod: vertx-create-span-sidecar-7d56dc77ff-2s4kh replicaset-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:47 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-7d56dc77ff to 0 from 1 deployment-controller logger.go:42: 07:27:49 | sidecar-deployment | 2023-12-18 07:27:47 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7648855744 to 1 from 0 deployment-controller logger.go:42: 07:27:49 | sidecar-deployment | Deleting namespace: kuttl-test-improved-molly === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (142.69s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.10s) --- PASS: kuttl/harness/sidecar-namespace (67.50s) --- PASS: kuttl/harness/sidecar-skip-webhook (16.40s) --- PASS: kuttl/harness/sidecar-deployment (52.65s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml time="2023-12-18T07:27:58Z" level=debug msg="Setting a new name for the test suites" time="2023-12-18T07:27:58Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-12-18T07:27:58Z" level=debug msg="normalizing test case names" time="2023-12-18T07:27:58Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts" time="2023-12-18T07:27:58Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace" time="2023-12-18T07:27:58Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook" time="2023-12-18T07:27:58Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment"
+------------------------------+--------+
|             NAME             | RESULT |
+------------------------------+--------+
| sidecar_artifacts            | passed |
| sidecar_sidecar_namespace    | passed |
| sidecar_sidecar_skip_webhook | passed |
| sidecar_sidecar_deployment   | passed |
+------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true + '[' 3 -ne 3 ']' + test_suite_name=streaming + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'.
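Note on the report pipeline that closed the sidecar suite above, before the streaming suite's setup continues: every suite ends with the same two steps. kubectl-kuttl writes ./artifacts/kuttl-report.xml, then junitcli normalizes the case names (dropping the synthetic 'artifacts' case and mapping '/' and '-' to '_', as the level=debug lines show) and places the result in the CI artifacts directory. Extracted from the sidecar run:

  cd tests/e2e/sidecar/_build
  KUBECONFIG=/tmp/kubeconfig-3803946269 kubectl-kuttl test --report xml
  junitcli --suite-name sidecar --report \
    --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml
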
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/streaming.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-streaming make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=3.6.0 \ SKIP_KAFKA=false \ SKIP_ES_EXTERNAL=true \ ./tests/e2e/streaming/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 49m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 49m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/streaming/render.sh ++ export SUITE_DIR=./tests/e2e/streaming ++ SUITE_DIR=./tests/e2e/streaming ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
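The version_le call traced above is a sort -V trick: both versions go through GNU sort's version ordering and the helper succeeds only when its first argument sorts first. A minimal reconstruction from the trace (the real helper lives in the suite's shell library; this is inferred, not copied):

    version_le() {
      # true when $1 <= $2 under version-number ordering
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }

Here version_le 3.6.0 0.25.0 fails because 0.25.0 sorts first, so the script falls through to KAFKA_USE_CUSTOM_PODSET=true.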
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./01-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml + render_smoke_test simple-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=simple-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + export JAEGER_NAME=simple-streaming + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-tls + '[' 1 -ne 1 ']' + test_name=streaming-with-tls + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-tls' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m' Rendering files for test streaming-with-tls + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple + '[' streaming-simple '!=' _build ']' + cd .. 
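In streaming-simple above, render_smoke_test derives every endpoint from the Jaeger instance name: with is_secured=true the query side goes through the OpenShift route on :443 while spans are reported directly to the collector's headless service on :14268. Generalized from the trace, JAEGER_NAME is the only input:

    JAEGER_NAME=simple-streaming
    export JAEGER_QUERY_ENDPOINT="https://${JAEGER_NAME}-query:443"                    # secured query
    export JAEGER_COLLECTOR_ENDPOINT="http://${JAEGER_NAME}-collector-headless:14268"  # collector HTTP
    gomplate -f tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml

The matching unset calls at the end of each test keep these variables from leaking into the next rendered test.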
+ mkdir -p streaming-with-tls + cd streaming-with-tls + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./01-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + render_smoke_test tls-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=tls-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + export JAEGER_NAME=tls-streaming + JAEGER_NAME=tls-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. 
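render_assert_kafka, used by both streaming tests above, emits one assert file per Strimzi component and numbers them relative to the install step with expr, so a Kafka install at step 00 is always followed by asserts in ZooKeeper, Kafka, entity-operator order. A sketch of the numbering (template paths shortened; the zero-padding here is naive, done with a literal leading 0):

    test_step=00
    gomplate -f assert-zookeeper-cluster.yaml.template -o ./${test_step}-assert.yaml
    gomplate -f assert-kafka-cluster.yaml.template     -o ./0$(expr "$test_step" + 1)-assert.yaml
    gomplate -f assert-entity-operator.yaml.template   -o ./0$(expr "$test_step" + 2)-assert.yaml

With autoprovisioned=false the replica count is pinned to 1, matching the REPLICAS=1 exports in the trace.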
+ mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./04-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + version_lt 1.27 1.23 ++ echo 1.27 1.23 ++ tr ' ' '\n' ++ sort -rV ++ head -n 1 + test 1.27 '!=' 1.27 + rm ./08-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3803946269 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
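Two details in the autoprovisioning render above are worth flagging. First, autoscaling is patched into the Jaeger CR in place with yq. Second, the resource patch writes requests.memory twice, and the second value, "500m", is a CPU-style quantity (as memory it parses as half a byte), which looks like it was meant for requests.cpu. The patches as traced:

    yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml
    yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml  # likely intended: requests.cpu
    yq e -i '.spec.ingester.autoscale=true'  ./02-install.yaml
    yq e -i '.spec.ingester.minReplicas=1'   ./02-install.yaml
    yq e -i '.spec.ingester.maxReplicas=2'   ./02-install.yaml

The is_kafka_minimal_enabled probe that follows simply reads the jaeger-operator pod's env for KAFKA-PROVISIONING-MINIMAL across three candidate namespaces, returning success on the first true it finds (here, in openshift-distributed-tracing).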
harness.go:275: Successful connection to cluster at: https://api.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-simple === PAUSE kuttl/harness/streaming-simple === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === RUN kuttl/harness/streaming-with-tls === PAUSE kuttl/harness/streaming-with-tls === CONT kuttl/harness/artifacts logger.go:42: 07:28:10 | artifacts | Creating namespace: kuttl-test-central-elf logger.go:42: 07:28:10 | artifacts | artifacts events from ns kuttl-test-central-elf: logger.go:42: 07:28:10 | artifacts | Deleting namespace: kuttl-test-central-elf === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 07:28:16 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:28:16 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:28:16 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:28:16 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-darling-bobcat logger.go:42: 07:28:16 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 07:28:16 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 07:28:16 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 07:28:16 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 07:28:17 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 07:28:17 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 07:28:23 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 07:28:23 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 07:28:23 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 07:28:26 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 07:28:26 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 07:28:44 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 1-install logger.go:42: 07:28:44 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install logger.go:42: 07:28:44 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-darling-bobcat/auto-provisioned created 
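The Ignoring lines at the start of this test are kuttl's file filter at work: only names matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ become test steps, so README.md and the elasticsearch_*.yml manifests are skipped and instead get applied by the shell commands inside the 1-install step. An approximate check with grep (POSIX ERE has no \d or non-capturing groups, so the pattern is spelled out):

    ls | grep -E '^[0-9]+-[^.]+(\.yaml)?$'
    # matches: 01-install.yaml, 01-assert.yaml, 02-install.yaml, ...
    # skips:   README.md, elasticsearch_0.yml, elasticsearch_1.yml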
logger.go:42: 07:28:44 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install logger.go:42: 07:28:44 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3- logger.go:42: 07:29:27 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3- logger.go:42: 07:29:27 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4- logger.go:42: 07:30:08 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4- logger.go:42: 07:30:08 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5- logger.go:42: 07:30:32 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5- logger.go:42: 07:30:32 | streaming-with-autoprovisioning-autoscale/6- | starting test step 6- logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale/6- | test step completed 6- logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7- logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale/7- | test step completed 7- logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-darling-bobcat: logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:23 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-darling-bobcat/elasticsearch-0 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:23 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.129.2.68/23] from ovn-kubernetes logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:23 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:23 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:31 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 8.105s (8.105s including waiting) kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:32 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:32 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:38 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.129.2.68:9200/": dial tcp 10.129.2.68:9200: connect: connection refused kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:48 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:49 +0000 UTC Normal PodDisruptionBudget.policy 
auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:49 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:49 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-darling-bobcat/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bb85c6fdf-6mbww_743bd9cc-52e8-435e-a6fb-f129af566a2e logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:53 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-6964e39f-a35c-4252-b303-d805ac6b8a3b ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bb85c6fdf-6mbww_743bd9cc-52e8-435e-a6fb-f129af566a2e logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:54 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-darling-bobcat/auto-provisioned-zookeeper-0 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-6964e39f-a35c-4252-b303-d805ac6b8a3b" attachdetach-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:57 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.129.2.69/23] from ovn-kubernetes logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:28:57 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:06 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" in 8.847s (8.847s including waiting) kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:06 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:06 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:29 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:29 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before 
binding persistentvolume-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:29 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:29 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-darling-bobcat/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bb85c6fdf-6mbww_743bd9cc-52e8-435e-a6fb-f129af566a2e logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:33 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-0b755748-c147-4012-9be1-9d6ad007d961 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7bb85c6fdf-6mbww_743bd9cc-52e8-435e-a6fb-f129af566a2e logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:34 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-darling-bobcat/auto-provisioned-kafka-0 to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:36 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-0b755748-c147-4012-9be1-9d6ad007d961" attachdetach-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:38 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.131.0.88/23] from ovn-kubernetes logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:38 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:45 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" in 7.507s (7.508s including waiting) kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:45 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:29:45 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll Binding Scheduled Successfully assigned kuttl-test-darling-bobcat/auto-provisioned-entity-operator-7c4cccc8dd-z9wll to ip-10-0-98-211.ec2.internal default-scheduler logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll AddedInterface Add eth0 [10.131.0.90/23] from 
ovn-kubernetes logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:f1be1aa2f18276f9169893eb55e3733cd52fa38f2101a9b3925f79774841689f" already present on machine kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:f1be1aa2f18276f9169893eb55e3733cd52fa38f2101a9b3925f79774841689f" already present on machine kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-36-rhel8@sha256:55b18000b2e774adccd0d5393b58fc3b8358ab174eaaa3ada9d520b5cde30669" already present on machine kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-7c4cccc8dd SuccessfulCreate Created pod: auto-provisioned-entity-operator-7c4cccc8dd-z9wll replicaset-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:11 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-7c4cccc8dd to 1 deployment-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:12 +0000 UTC Normal Pod auto-provisioned-entity-operator-7c4cccc8dd-z9wll.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:34 +0000 UTC Normal Pod auto-provisioned-collector-5587d849f9-8fgh5 Binding Scheduled Successfully assigned kuttl-test-darling-bobcat/auto-provisioned-collector-5587d849f9-8fgh5 to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:34 +0000 UTC Normal ReplicaSet.apps 
auto-provisioned-collector-5587d849f9 SuccessfulCreate Created pod: auto-provisioned-collector-5587d849f9-8fgh5 replicaset-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:34 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-5587d849f9 to 1 deployment-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:34 +0000 UTC Normal Pod auto-provisioned-ingester-5c8fcbdb7c-f4x95 Binding Scheduled Successfully assigned kuttl-test-darling-bobcat/auto-provisioned-ingester-5c8fcbdb7c-f4x95 to ip-10-0-99-179.ec2.internal default-scheduler logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:34 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-5c8fcbdb7c SuccessfulCreate Created pod: auto-provisioned-ingester-5c8fcbdb7c-f4x95 replicaset-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:34 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-5c8fcbdb7c to 1 deployment-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:34 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr Binding Scheduled Successfully assigned kuttl-test-darling-bobcat/auto-provisioned-query-b84c4bf8d-45rnr to ip-10-0-0-85.ec2.internal default-scheduler logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:34 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-b84c4bf8d SuccessfulCreate Created pod: auto-provisioned-query-b84c4bf8d-45rnr replicaset-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:34 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-b84c4bf8d to 1 deployment-controller logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-collector-5587d849f9-8fgh5 AddedInterface Add eth0 [10.128.2.71/23] from ovn-kubernetes logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-collector-5587d849f9-8fgh5.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-collector-5587d849f9-8fgh5.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-collector-5587d849f9-8fgh5.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-ingester-5c8fcbdb7c-f4x95 AddedInterface Add eth0 [10.129.2.70/23] from ovn-kubernetes logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-ingester-5c8fcbdb7c-f4x95.spec.containers{jaeger-ingester} Pulling Pulling image 
"registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:849018528225b7370cc4740fc9f94bef7ffd4195328a916a6013d88f885eebe2" kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr AddedInterface Add eth0 [10.128.2.72/23] from ovn-kubernetes logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:35 +0000 UTC Normal Pod auto-provisioned-query-b84c4bf8d-45rnr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:42 +0000 UTC Normal Pod auto-provisioned-ingester-5c8fcbdb7c-f4x95.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:849018528225b7370cc4740fc9f94bef7ffd4195328a916a6013d88f885eebe2" in 6.837s (6.837s including waiting) kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:42 +0000 UTC Normal Pod auto-provisioned-ingester-5c8fcbdb7c-f4x95.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | 2023-12-18 07:30:42 +0000 UTC Normal Pod 
auto-provisioned-ingester-5c8fcbdb7c-f4x95.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 07:30:43 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-darling-bobcat === CONT kuttl/harness/streaming-with-tls logger.go:42: 07:31:24 | streaming-with-tls | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:31:24 | streaming-with-tls | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:31:24 | streaming-with-tls | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:31:24 | streaming-with-tls | Creating namespace: kuttl-test-true-jay logger.go:42: 07:31:25 | streaming-with-tls/0-install | starting test step 0-install logger.go:42: 07:31:25 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 07:31:25 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:31:25 | streaming-with-tls/0-install | >>>> Skiping kafka-operator undeploy logger.go:42: 07:31:25 | streaming-with-tls/0-install | kubectl delete --namespace kuttl-test-true-jay -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 07:31:25 | streaming-with-tls/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist logger.go:42: 07:31:25 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:31:25 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=3.6.0] logger.go:42: 07:31:25 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:31:25 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-true-jay logger.go:42: 07:31:25 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-true-jay 2>&1 | grep -v "already exists" || true logger.go:42: 07:31:25 | streaming-with-tls/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 07:31:25 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-true-jay logger.go:42: 07:31:25 | streaming-with-tls/0-install | mkdir -p tests/_build/ logger.go:42: 07:31:25 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-true-jay 2>&1 | grep -v "already exists" || true logger.go:42: 07:31:25 | streaming-with-tls/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/3.6.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 07:31:25 | streaming-with-tls/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 07:31:25 | streaming-with-tls/0-install | Dload Upload Total Spent Left Speed logger.go:42: 07:31:25 | streaming-with-tls/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 14 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 logger.go:42: 07:31:25 | streaming-with-tls/0-install | curl: (22) The requested URL returned error: 404 logger.go:42: 07:31:25 | streaming-with-tls/0-install | make[2]: *** [Makefile:252: kafka] Error 22 logger.go:42: 07:31:25 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' case.go:364: failed in step 
0-install case.go:366: exit status 2 logger.go:42: 07:31:25 | streaming-with-tls | streaming-with-tls events from ns kuttl-test-true-jay: logger.go:42: 07:31:25 | streaming-with-tls | Deleting namespace: kuttl-test-true-jay === CONT kuttl/harness/streaming-simple logger.go:42: 07:31:31 | streaming-simple | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:31:31 | streaming-simple | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:31:31 | streaming-simple | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:31:31 | streaming-simple | Creating namespace: kuttl-test-knowing-bulldog logger.go:42: 07:31:31 | streaming-simple/0-install | starting test step 0-install logger.go:42: 07:31:31 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 07:31:31 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:31:31 | streaming-simple/0-install | >>>> Skiping kafka-operator undeploy logger.go:42: 07:31:31 | streaming-simple/0-install | kubectl delete --namespace kuttl-test-knowing-bulldog -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 07:31:31 | streaming-simple/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist logger.go:42: 07:31:31 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:31:31 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=3.6.0] logger.go:42: 07:31:31 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:31:31 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-knowing-bulldog logger.go:42: 07:31:31 | streaming-simple/0-install | kubectl create namespace kuttl-test-knowing-bulldog 2>&1 | grep -v "already exists" || true logger.go:42: 07:31:31 | streaming-simple/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 07:31:31 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-knowing-bulldog logger.go:42: 07:31:31 | streaming-simple/0-install | mkdir -p tests/_build/ logger.go:42: 07:31:31 | streaming-simple/0-install | kubectl create namespace kuttl-test-knowing-bulldog 2>&1 | grep -v "already exists" || true logger.go:42: 07:31:31 | streaming-simple/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/3.6.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 07:31:31 | streaming-simple/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 07:31:31 | streaming-simple/0-install | Dload Upload Total Spent Left Speed logger.go:42: 07:31:31 | streaming-simple/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 14 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 logger.go:42: 07:31:31 | streaming-simple/0-install | curl: (22) The requested URL returned error: 404 logger.go:42: 07:31:31 | streaming-simple/0-install | make[2]: *** [Makefile:252: kafka] Error 22 logger.go:42: 07:31:31 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' case.go:364: failed in step 0-install case.go:366: exit status 
2 logger.go:42: 07:31:31 | streaming-simple | streaming-simple events from ns kuttl-test-knowing-bulldog: logger.go:42: 07:31:31 | streaming-simple | Deleting namespace: kuttl-test-knowing-bulldog === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- FAIL: kuttl (207.47s) --- FAIL: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.14s) --- PASS: kuttl/harness/streaming-with-autoprovisioning-autoscale (188.19s) --- FAIL: kuttl/harness/streaming-with-tls (6.63s) --- FAIL: kuttl/harness/streaming-simple (6.48s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml time="2023-12-18T07:31:38Z" level=debug msg="Setting a new name for the test suites" time="2023-12-18T07:31:38Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-12-18T07:31:38Z" level=debug msg="normalizing test case names" time="2023-12-18T07:31:38Z" level=debug msg="streaming/artifacts -> streaming_artifacts" time="2023-12-18T07:31:38Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale" time="2023-12-18T07:31:38Z" level=debug msg="streaming/streaming-with-tls -> streaming_streaming_with_tls" time="2023-12-18T07:31:38Z" level=debug msg="streaming/streaming-simple -> streaming_streaming_simple" +-----------------------------------------------------+--------+ | NAME | RESULT | +-----------------------------------------------------+--------+ | streaming_artifacts | passed | | streaming_streaming_with_autoprovisioning_autoscale | passed | | streaming_streaming_with_tls | failed | | streaming_streaming_simple | failed | +-----------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true + '[' 3 -ne 3 ']' + test_suite_name=ui + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/ui.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-ui make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 53m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 53m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/ui/render.sh ++ export SUITE_DIR=./tests/e2e/ui ++ SUITE_DIR=./tests/e2e/ui ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/ui ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test allinone + '[' 1 -ne 1 ']' + test_name=allinone + echo =========================================================================== =========================================================================== + info 'Rendering files for test allinone' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test allinone\e[0m' Rendering files for test allinone + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build + '[' _build '!=' _build ']' + mkdir -p allinone + cd allinone + export GET_URL_COMMAND + export URL + export JAEGER_NAME=all-in-one-ui + JAEGER_NAME=all-in-one-ui + '[' true = true ']' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml + start_test production + '[' 1 -ne 1 ']' + test_name=production + echo =========================================================================== =========================================================================== + info 'Rendering files for test production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test production\e[0m' Rendering files for test production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone + '[' allinone '!=' _build ']' + cd .. 
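Both UI tests resolve the externally reachable address from the OpenShift route rather than from a Service: GET_URL_COMMAND pulls the first route's hostname with jsonpath, and the assertion scripts wrap it into the /search URL. Stripped of the quoting gymnastics in the trace:

    # NAMESPACE is injected by kuttl at run time
    host=$(kubectl get routes -n "$NAMESPACE" -o=jsonpath='{.items[0].status.ingress[0].host}')
    URL="https://${host}/search"

For the production instance the rendered steps first expect 403 on the OAuth-protected route (02-check-forbbiden-access.yaml), then 200 once a token is used, mirroring the EXPECTED_CODE values above.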
+ mkdir -p production
+ cd production
+ export JAEGER_NAME=production-ui
+ JAEGER_NAME=production-ui
+ [[ true = true ]]
+ [[ true = true ]]
+ render_install_jaeger production-ui production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=production-ui
+ JAEGER_NAME=production-ui
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ '[' true = true ']'
+ INSECURE=true
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml
+ INSECURE=true
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml
+ ASSERT_PRESENT=false
+ TRACKING_ID=MyTrackingId
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml
+ ASSERT_PRESENT=true
+ TRACKING_ID=MyTrackingId
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running ui E2E tests'
Running ui E2E tests
+ cd tests/e2e/ui/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3803946269
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 600 seconds for each step
harness.go:372: testsuite: . has 3 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/allinone
=== PAUSE kuttl/harness/allinone
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN   kuttl/harness/production
=== PAUSE kuttl/harness/production
=== CONT  kuttl/harness/allinone
logger.go:42: 07:31:45 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:31:45 | allinone | Creating namespace: kuttl-test-credible-sole
logger.go:42: 07:31:45 | allinone/0-install | starting test step 0-install
logger.go:42: 07:31:45 | allinone/0-install | Jaeger:kuttl-test-credible-sole/all-in-one-ui created
logger.go:42: 07:31:49 | allinone/0-install | test step completed 0-install
logger.go:42: 07:31:49 | allinone/1-curl | starting test step 1-curl
logger.go:42: 07:31:49 | allinone/1-curl | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:31:49 | allinone/1-curl | Checking the Ingress host value was populated
logger.go:42: 07:31:49 | allinone/1-curl | Try number 0
logger.go:42: 07:31:49 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-credible-sole.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 07:31:49 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui]
logger.go:42: 07:31:49 | allinone/1-curl | Checking an expected HTTP response
logger.go:42: 07:31:49 | allinone/1-curl | Running in OpenShift
logger.go:42: 07:31:49 | allinone/1-curl | User not provided. Getting the token...
logger.go:42: 07:31:50 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:31:57 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-credible-sole.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:31:57 | allinone/1-curl | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:31:57 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-credible-sole.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:31:57 | allinone/1-curl | HTTP response is 503. 200 expected. Waiting 10 s
logger.go:42: 07:32:07 | allinone/1-curl | Try number 3/30 the https://all-in-one-ui-kuttl-test-credible-sole.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:32:07 | allinone/1-curl | curl response asserted properly
logger.go:42: 07:32:07 | allinone/1-curl | test step completed 1-curl
logger.go:42: 07:32:07 | allinone/2-delete | starting test step 2-delete
logger.go:42: 07:32:07 | allinone/2-delete | Jaeger:kuttl-test-credible-sole/all-in-one-ui created
logger.go:42: 07:32:07 | allinone/2-delete | test step completed 2-delete
logger.go:42: 07:32:07 | allinone/3-install | starting test step 3-install
logger.go:42: 07:32:07 | allinone/3-install | Jaeger:kuttl-test-credible-sole/all-in-one-ui updated
logger.go:42: 07:32:07 | allinone/3-install | test step completed 3-install
logger.go:42: 07:32:07 | allinone/4-test-ui-config | starting test step 4-test-ui-config
logger.go:42: 07:32:07 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:32:07 | allinone/4-test-ui-config | Checking the Ingress host value was populated
logger.go:42: 07:32:07 | allinone/4-test-ui-config | Try number 0
logger.go:42: 07:32:07 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template:
logger.go:42: 07:32:07 | allinone/4-test-ui-config | template was:
logger.go:42: 07:32:07 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host}
logger.go:42: 07:32:07 | allinone/4-test-ui-config | object given to jsonpath engine was:
logger.go:42: 07:32:07 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}}
logger.go:42: 07:32:07 | allinone/4-test-ui-config |
logger.go:42: 07:32:07 | allinone/4-test-ui-config |
logger.go:42: 07:32:17 | allinone/4-test-ui-config | Try number 1
logger.go:42: 07:32:17 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-credible-sole.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 07:32:17 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 07:32:18 | allinone/4-test-ui-config | time="2023-12-18T07:32:18Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-credible-sole.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 07:32:18 | allinone/4-test-ui-config | time="2023-12-18T07:32:18Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 07:32:18 | allinone/4-test-ui-config | time="2023-12-18T07:32:18Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-credible-sole.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search"
logger.go:42: 07:32:18 | allinone/4-test-ui-config | time="2023-12-18T07:32:18Z" level=info msg="Doing request number 0"
logger.go:42: 07:32:18 | allinone/4-test-ui-config | time="2023-12-18T07:32:18Z" level=info msg="Content found and asserted!"
logger.go:42: 07:32:18 | allinone/4-test-ui-config | time="2023-12-18T07:32:18Z" level=info msg="Success!"
logger.go:42: 07:32:18 | allinone/4-test-ui-config | test step completed 4-test-ui-config
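The jsonpath error in step 4 is expected on the first try: the Route object already exists, but its status.ingress array is still empty, so index 0 is out of bounds until the router populates it. The rendered ensure-ingress-host.sh itself is not reproduced in this log; a plausible sketch of the retry it performs, consistent with the "Try number N" and roughly 10-second waits seen above:

# Hedged reconstruction of ensure-ingress-host.sh, not the actual template.
try=0
HOST=""
while [ -z "$HOST" ]; do
  echo "Try number $try"
  # Fails harmlessly while .status.ingress is still empty.
  HOST=$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE" 2>/dev/null) || HOST=""
  [ -z "$HOST" ] && sleep 10   # the log shows ~10 s between tries
  try=$((try + 1))
done
echo "Hostname is $HOST"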
logger.go:42: 07:32:18 | allinone | allinone events from ns kuttl-test-credible-sole:
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:48 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4 Binding Scheduled Successfully assigned kuttl-test-credible-sole/all-in-one-ui-7c5f7b8f4f-wtvf4 to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:48 +0000 UTC Warning Pod all-in-one-ui-7c5f7b8f4f-wtvf4 FailedMount MountVolume.SetUp failed for volume "all-in-one-ui-ui-oauth-proxy-tls" : secret "all-in-one-ui-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:48 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-7c5f7b8f4f SuccessfulCreate Created pod: all-in-one-ui-7c5f7b8f4f-wtvf4 replicaset-controller
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:48 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-7c5f7b8f4f to 1 deployment-controller
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:49 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4 AddedInterface Add eth0 [10.129.2.71/23] from ovn-kubernetes
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:49 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:49 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:49 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:49 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:49 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:49 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:53 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:53 +0000 UTC Normal Pod all-in-one-ui-7c5f7b8f4f-wtvf4.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:53 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-7c5f7b8f4f SuccessfulDelete Deleted pod: all-in-one-ui-7c5f7b8f4f-wtvf4 replicaset-controller
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:53 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-7c5f7b8f4f to 0 from 1 deployment-controller
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn Binding Scheduled Successfully assigned kuttl-test-credible-sole/all-in-one-ui-94db4d784-6qwzn to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn AddedInterface Add eth0 [10.129.2.72/23] from ovn-kubernetes
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-94db4d784 SuccessfulCreate Created pod: all-in-one-ui-94db4d784-6qwzn replicaset-controller
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:31:54 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-94db4d784 to 1 deployment-controller
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:32:07 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:32:07 +0000 UTC Normal Pod all-in-one-ui-94db4d784-6qwzn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:32:11 +0000 UTC Normal Pod all-in-one-ui-7ddb6f6dfd-skqzw Binding Scheduled Successfully assigned kuttl-test-credible-sole/all-in-one-ui-7ddb6f6dfd-skqzw to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:32:11 +0000 UTC Normal Pod all-in-one-ui-7ddb6f6dfd-skqzw AddedInterface Add eth0 [10.129.2.73/23] from ovn-kubernetes
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:32:11 +0000 UTC Normal Pod all-in-one-ui-7ddb6f6dfd-skqzw.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:a21c5ae4339682904e8075a391a6cee11ddf66142ba0b3e9150c89f1c77ff373" already present on machine kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:32:11 +0000 UTC Normal Pod all-in-one-ui-7ddb6f6dfd-skqzw.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:32:11 +0000 UTC Normal Pod all-in-one-ui-7ddb6f6dfd-skqzw.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:32:11 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-7ddb6f6dfd SuccessfulCreate Created pod: all-in-one-ui-7ddb6f6dfd-skqzw replicaset-controller
logger.go:42: 07:32:18 | allinone | 2023-12-18 07:32:11 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-7ddb6f6dfd to 1 deployment-controller
logger.go:42: 07:32:18 | allinone | Deleting namespace: kuttl-test-credible-sole
=== CONT  kuttl/harness/production
logger.go:42: 07:32:24 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:32:24 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:32:24 | production | Creating namespace: kuttl-test-main-skylark
logger.go:42: 07:32:24 | production/1-install | starting test step 1-install
logger.go:42: 07:32:24 | production/1-install | Jaeger:kuttl-test-main-skylark/production-ui created
logger.go:42: 07:33:02 | production/1-install | test step completed 1-install
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | Checking the Ingress host value was populated
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | Try number 0
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui]
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | Checking an expected HTTP response
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | Running in OpenShift
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | Not using any secret
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:33:02 | production/2-check-forbbiden-access | HTTP response is 503. 403 expected. Waiting 10 s
logger.go:42: 07:33:12 | production/2-check-forbbiden-access | Try number 3/30 the https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:33:12 | production/2-check-forbbiden-access | curl response asserted properly
logger.go:42: 07:33:12 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access
logger.go:42: 07:33:12 | production/3-curl | starting test step 3-curl
logger.go:42: 07:33:12 | production/3-curl | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:33:12 | production/3-curl | Checking the Ingress host value was populated
logger.go:42: 07:33:12 | production/3-curl | Try number 0
logger.go:42: 07:33:12 | production/3-curl | Hostname is production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 07:33:12 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui]
logger.go:42: 07:33:12 | production/3-curl | Checking an expected HTTP response
logger.go:42: 07:33:12 | production/3-curl | Running in OpenShift
logger.go:42: 07:33:12 | production/3-curl | User not provided. Getting the token...
logger.go:42: 07:33:14 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:33:20 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:33:20 | production/3-curl | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:33:20 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:33:20 | production/3-curl | HTTP response is 503. 200 expected. Waiting 10 s
logger.go:42: 07:33:30 | production/3-curl | Try number 3/30 the https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:33:30 | production/3-curl | curl response asserted properly
logger.go:42: 07:33:30 | production/3-curl | test step completed 3-curl
logger.go:42: 07:33:30 | production/4-install | starting test step 4-install
logger.go:42: 07:33:30 | production/4-install | Jaeger:kuttl-test-main-skylark/production-ui updated
logger.go:42: 07:33:30 | production/4-install | test step completed 4-install
logger.go:42: 07:33:30 | production/5-check-disabled-security | starting test step 5-check-disabled-security
logger.go:42: 07:33:30 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:33:30 | production/5-check-disabled-security | Checking the Ingress host value was populated
logger.go:42: 07:33:30 | production/5-check-disabled-security | Try number 0
logger.go:42: 07:33:30 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 07:33:30 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui]
logger.go:42: 07:33:30 | production/5-check-disabled-security | Checking an expected HTTP response
logger.go:42: 07:33:30 | production/5-check-disabled-security | Running in OpenShift
logger.go:42: 07:33:30 | production/5-check-disabled-security | Not using any secret
logger.go:42: 07:33:30 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:33:30 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 07:33:30 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:33:30 | production/5-check-disabled-security | HTTP response is 403. 200 expected. Waiting 10 s
logger.go:42: 07:33:40 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 07:33:40 | production/5-check-disabled-security | curl response asserted properly
logger.go:42: 07:33:40 | production/5-check-disabled-security | test step completed 5-check-disabled-security
logger.go:42: 07:33:40 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID
logger.go:42: 07:33:40 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:33:40 | production/6-check-NO-gaID | Checking the Ingress host value was populated
logger.go:42: 07:33:40 | production/6-check-NO-gaID | Try number 0
logger.go:42: 07:33:41 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 07:33:41 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 07:33:41 | production/6-check-NO-gaID | time="2023-12-18T07:33:41Z" level=info msg="Querying https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 07:33:41 | production/6-check-NO-gaID | time="2023-12-18T07:33:41Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 07:33:41 | production/6-check-NO-gaID | time="2023-12-18T07:33:41Z" level=info msg="Polling to https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search"
logger.go:42: 07:33:41 | production/6-check-NO-gaID | time="2023-12-18T07:33:41Z" level=info msg="Doing request number 0"
logger.go:42: 07:33:41 | production/6-check-NO-gaID | time="2023-12-18T07:33:41Z" level=info msg="Content not found and asserted it was not found!"
logger.go:42: 07:33:41 | production/6-check-NO-gaID | time="2023-12-18T07:33:41Z" level=info msg="Success!"
logger.go:42: 07:33:41 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID
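Steps 2, 3 and 5 all show the same pattern: the first requests reach the Route while the pods behind it are still rolling over, so a 503 (or a stale 403) comes back and the script waits and retries until EXPECTED_CODE matches. assert-jaeger-http-code.sh itself is not reproduced in this log; a simplified sketch of its loop, with the 30-try limit and 10-second wait inferred from the "Try number N/30 ... Waiting 10 s" lines above rather than from the script source:

# Hedged sketch of the polling assertion; HOST as resolved by ensure-ingress-host.sh.
URL="https://${HOST}/search"
EXPECTED_CODE=200
for try in $(seq 1 30); do
  echo "Try number ${try}/30 the ${URL}"
  code=$(curl -k -s -o /dev/null -w '%{http_code}' "$URL")
  if [ "$code" = "$EXPECTED_CODE" ]; then
    echo "curl response asserted properly"
    exit 0
  fi
  echo "HTTP response is ${code}. ${EXPECTED_CODE} expected. Waiting 10 s"
  sleep 10
done
exit 1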
logger.go:42: 07:33:41 | production/7-add-tracking-id | starting test step 7-add-tracking-id
logger.go:42: 07:33:41 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE]
logger.go:42: 07:33:41 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured
logger.go:42: 07:33:41 | production/7-add-tracking-id | test step completed 7-add-tracking-id
logger.go:42: 07:33:41 | production/8-check-gaID | starting test step 8-check-gaID
logger.go:42: 07:33:41 | production/8-check-gaID | running command: [./ensure-ingress-host.sh]
logger.go:42: 07:33:41 | production/8-check-gaID | Checking the Ingress host value was populated
logger.go:42: 07:33:41 | production/8-check-gaID | Try number 0
logger.go:42: 07:33:41 | production/8-check-gaID | Hostname is production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 07:33:41 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 07:33:42 | production/8-check-gaID | time="2023-12-18T07:33:42Z" level=info msg="Querying https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 07:33:42 | production/8-check-gaID | time="2023-12-18T07:33:42Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 07:33:42 | production/8-check-gaID | time="2023-12-18T07:33:42Z" level=info msg="Polling to https://production-ui-kuttl-test-main-skylark.apps.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com/search"
logger.go:42: 07:33:42 | production/8-check-gaID | time="2023-12-18T07:33:42Z" level=info msg="Doing request number 0"
logger.go:42: 07:33:42 | production/8-check-gaID | time="2023-12-18T07:33:42Z" level=warning msg="Found: false . Assert: true"
logger.go:42: 07:33:42 | production/8-check-gaID | time="2023-12-18T07:33:42Z" level=warning msg="The condition of the test function was not accomplished"
logger.go:42: 07:33:42 | production/8-check-gaID | time="2023-12-18T07:33:42Z" level=info msg="Doing request number 1"
logger.go:42: 07:33:42 | production/8-check-gaID | time="2023-12-18T07:33:42Z" level=warning msg="Found: false . Assert: true"
logger.go:42: 07:33:42 | production/8-check-gaID | time="2023-12-18T07:33:42Z" level=warning msg="The condition of the test function was not accomplished"
logger.go:42: 07:33:50 | production/8-check-gaID | time="2023-12-18T07:33:50Z" level=info msg="Doing request number 2"
logger.go:42: 07:33:50 | production/8-check-gaID | time="2023-12-18T07:33:50Z" level=info msg="Content found and asserted!"
logger.go:42: 07:33:50 | production/8-check-gaID | time="2023-12-18T07:33:50Z" level=info msg="Success!"
logger.go:42: 07:33:50 | production/8-check-gaID | test step completed 8-check-gaID
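Step 8 shows the other half of the gaID check: the checker polls the UI until the tracking ID appears, because the query pods are still restarting with the new configuration when the step begins (hence the two "Found: false . Assert: true" warnings before success). The real implementation lives in cmd-utils/uiconfig/main.go and is not reproduced here; a rough shell equivalent of the assertion, for illustration only:

# Illustrative only: poll the UI until EXPECTED_CONTENT is (or is not) present.
EXPECTED_CONTENT=MyTrackingId
ASSERT_PRESENT=true
for request in $(seq 0 9); do
  echo "Doing request number $request"
  if curl -k -s "https://${HOST}/search" | grep -q "$EXPECTED_CONTENT"; then
    found=true
  else
    found=false
  fi
  if [ "$found" = "$ASSERT_PRESENT" ]; then
    echo "Success!"
    exit 0
  fi
  sleep 5   # the interval is a guess; the Go tool's backoff differs
done
exit 1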
logger.go:42: 07:33:50 | production | production events from ns kuttl-test-main-skylark:
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62 Binding Scheduled Successfully assigned kuttl-test-main-skylark/elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62 to ip-10-0-99-179.ec2.internal default-scheduler
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:31 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986d8f7f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62 replicaset-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:31 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmainskylarkproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986d8f7f to 1 deployment-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62 AddedInterface Add eth0 [10.129.2.74/23] from ovn-kubernetes
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:4587958c428b40ac31b46a96a752c2b338814895891023b3ba96ce4c12b5906d" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:8c14e8afb359f1b4ea4b8ed370d15d3b0c272b39ba3b90e5b314f3ccb5ac69c5" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:32 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:42 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:47 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmainskylarkproductionui-1-5d986bqs62.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-collector-8575999644-8plvb Binding Scheduled Successfully assigned kuttl-test-main-skylark/production-ui-collector-8575999644-8plvb to ip-10-0-0-85.ec2.internal default-scheduler
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-collector-8575999644-8plvb AddedInterface Add eth0 [10.128.2.73/23] from ovn-kubernetes
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-collector-8575999644-8plvb.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:9ec63242b45d2dd0e95ce79b5f0e04736cda1363b76c9c0d7453e92a26442fd9" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-collector-8575999644-8plvb.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-collector-8575999644-8plvb.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal ReplicaSet.apps production-ui-collector-8575999644 SuccessfulCreate Created pod: production-ui-collector-8575999644-8plvb replicaset-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-8575999644 to 1 deployment-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k Binding Scheduled Successfully assigned kuttl-test-main-skylark/production-ui-query-6ff55fc68b-9gk8k to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k AddedInterface Add eth0 [10.131.0.91/23] from ovn-kubernetes
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal ReplicaSet.apps production-ui-query-6ff55fc68b SuccessfulCreate Created pod: production-ui-query-6ff55fc68b-9gk8k replicaset-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:32:59 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-6ff55fc68b to 1 deployment-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:14 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:14 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:14 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:15 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:15 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:15 +0000 UTC Normal Pod production-ui-query-6ff55fc68b-9gk8k.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:15 +0000 UTC Normal ReplicaSet.apps production-ui-query-6ff55fc68b SuccessfulDelete Deleted pod: production-ui-query-6ff55fc68b-9gk8k replicaset-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:15 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-6ff55fc68b to 0 from 1 deployment-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:16 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b Binding Scheduled Successfully assigned kuttl-test-main-skylark/production-ui-query-74fc86cd47-qth9b to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:16 +0000 UTC Normal ReplicaSet.apps production-ui-query-74fc86cd47 SuccessfulCreate Created pod: production-ui-query-74fc86cd47-qth9b replicaset-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:16 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-74fc86cd47 to 1 deployment-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b AddedInterface Add eth0 [10.131.0.92/23] from ovn-kubernetes
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:219bf2d14157acd90298df58bfe77c2e3ed51ce0c743c2e51b3ed54b73dafc14" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:17 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q Binding Scheduled Successfully assigned kuttl-test-main-skylark/production-ui-query-64cdbc569f-hdx9q to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q AddedInterface Add eth0 [10.131.0.93/23] from ovn-kubernetes
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal ReplicaSet.apps production-ui-query-64cdbc569f SuccessfulCreate Created pod: production-ui-query-64cdbc569f-hdx9q replicaset-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal Pod production-ui-query-74fc86cd47-qth9b.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal ReplicaSet.apps production-ui-query-74fc86cd47 SuccessfulDelete Deleted pod: production-ui-query-74fc86cd47-qth9b replicaset-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-74fc86cd47 to 0 from 1 deployment-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:32 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-64cdbc569f to 1 deployment-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:33 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:33 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:33 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:33 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:33 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:43 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:43 +0000 UTC Normal Pod production-ui-query-64cdbc569f-hdx9q.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:43 +0000 UTC Normal ReplicaSet.apps production-ui-query-64cdbc569f SuccessfulDelete Deleted pod: production-ui-query-64cdbc569f-hdx9q replicaset-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:43 +0000 UTC Normal Pod production-ui-query-b5db5c964-6x6v9 Binding Scheduled Successfully assigned kuttl-test-main-skylark/production-ui-query-b5db5c964-6x6v9 to ip-10-0-98-211.ec2.internal default-scheduler
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:43 +0000 UTC Normal Pod production-ui-query-b5db5c964-6x6v9 AddedInterface Add eth0 [10.131.0.94/23] from ovn-kubernetes
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:43 +0000 UTC Normal ReplicaSet.apps production-ui-query-b5db5c964 SuccessfulCreate Created pod: production-ui-query-b5db5c964-6x6v9 replicaset-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:43 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-64cdbc569f to 0 from 1 deployment-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:43 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-b5db5c964 to 1 deployment-controller
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:44 +0000 UTC Normal Pod production-ui-query-b5db5c964-6x6v9.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:116963b147bccac665fa10f28107468699d5ef632b5c86710a3e900423b404a9" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:44 +0000 UTC Normal Pod production-ui-query-b5db5c964-6x6v9.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:44 +0000 UTC Normal Pod production-ui-query-b5db5c964-6x6v9.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:44 +0000 UTC Normal Pod production-ui-query-b5db5c964-6x6v9.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:4437924ff09250ff83e94a4afa7bcbd46231e10ab55747023fe9c072d1484c79" already present on machine kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:44 +0000 UTC Normal Pod production-ui-query-b5db5c964-6x6v9.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | 2023-12-18 07:33:44 +0000 UTC Normal Pod production-ui-query-b5db5c964-6x6v9.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:33:50 | production | Deleting namespace: kuttl-test-main-skylark
=== CONT  kuttl/harness/artifacts
logger.go:42: 07:33:57 | artifacts | Creating namespace: kuttl-test-fleet-mite
logger.go:42: 07:33:57 | artifacts | artifacts events from ns kuttl-test-fleet-mite:
logger.go:42: 07:33:57 | artifacts | Deleting namespace: kuttl-test-fleet-mite
=== CONT  kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (138.53s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/allinone (39.55s)
--- PASS: kuttl/harness/production (92.79s)
--- PASS: kuttl/harness/artifacts (6.14s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml
time="2023-12-18T07:34:03Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-18T07:34:03Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-18T07:34:03Z" level=debug msg="normalizing test case names"
time="2023-12-18T07:34:03Z" level=debug msg="ui/allinone -> ui_allinone"
time="2023-12-18T07:34:03Z" level=debug msg="ui/production -> ui_production"
time="2023-12-18T07:34:03Z" level=debug msg="ui/artifacts -> ui_artifacts"
+---------------+--------+
|     NAME      | RESULT |
+---------------+--------+
| ui_allinone   | passed |
| ui_production | passed |
| ui_artifacts  | passed |
+---------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=upgrade
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/upgrade.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-upgrade
make[2]: Entering directory '/tmp/jaeger-tests'
make docker JAEGER_VERSION=1.52.1 IMG="quay.io//jaeger-operator:next"
make[3]: Entering directory '/tmp/jaeger-tests'
[ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.52.0" --build-arg=JAEGER_VERSION=1.52.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2023-12-18T07:34:03Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" .
make[3]: Leaving directory '/tmp/jaeger-tests'
touch build-e2e-upgrade-image
SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.52.0" JAEGER_OPERATOR_VERSION="1.52.0" JAEGER_VERSION="1.52.0" ./tests/e2e/upgrade/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 56m Cluster version is 4.15.0-0.nightly-2023-12-17-173511'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-17-173511 True False 56m Cluster version is 4.15.0-0.nightly-2023-12-17-173511' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
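The version_le trace just above is how the render script decides whether the Kafka deployment needs the custom pod-set templates: sort -V puts the smaller version first, so the helper succeeds exactly when its first argument is less than or equal to its second. The function body can be reconstructed directly from the trace; the surrounding if/else is an assumption, inferred from KAFKA_USE_CUSTOM_PODSET ending up true when the comparison fails:

version_le() {
  # True when $1 <= $2 in version order (sort -V yields the smaller first).
  test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
}
# 3.6.0 <= 0.25.0 is false, hence the trace sets KAFKA_USE_CUSTOM_PODSET=true.
if version_le 3.6.0 0.25.0; then
  KAFKA_USE_CUSTOM_PODSET=false   # control flow assumed, not shown in the trace
else
  KAFKA_USE_CUSTOM_PODSET=true
fi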
+++ dirname ./tests/e2e/upgrade/render.sh
++ export SUITE_DIR=./tests/e2e/upgrade
++ SUITE_DIR=./tests/e2e/upgrade
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/upgrade
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ export JAEGER_NAME
+ '[' true = true ']'
+ skip_test upgrade 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade
+ warning 'upgrade: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m'
WAR: upgrade: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test upgrade-from-latest-release 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade-from-latest-release
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade-from-latest-release
+ warning 'upgrade-from-latest-release: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m'
WAR: upgrade-from-latest-release: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running upgrade E2E tests'
Running upgrade E2E tests
+ cd tests/e2e/upgrade/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3803946269
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-grbb5z6p-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 600 seconds for each step
harness.go:372: testsuite: . has 1 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT  kuttl/harness/artifacts
logger.go:42: 07:34:05 | artifacts | Creating namespace: kuttl-test-rich-loon
logger.go:42: 07:34:05 | artifacts | artifacts events from ns kuttl-test-rich-loon:
logger.go:42: 07:34:05 | artifacts | Deleting namespace: kuttl-test-rich-loon
=== CONT  kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (6.12s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (6.07s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2023-12-18T07:34:11Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-18T07:34:11Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-18T07:34:11Z" level=debug msg="normalizing test case names"
time="2023-12-18T07:34:11Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
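Both upgrade tests were removed before kuttl ran, which is why the upgrade suite contains only the synthetic artifacts test. The skip_test helper can be reconstructed almost entirely from its set -x trace above; this sketch fills in only the control flow around the traced commands (warning is the suite's own helper, whose echo -e trace also appears above):

skip_test() {
  if [ $# -ne 2 ]; then
    echo "skip_test requires two arguments: <test_name> <message>"
    return 1
  fi
  test_name=$1
  message=$2
  # The trace checks the current directory is already _build before deleting.
  if [ "$(basename "$(pwd)")" != "_build" ]; then
    cd _build
  fi
  rm -rf "$test_name"                 # drop the rendered test so kuttl never schedules it
  warning "${test_name}: ${message}"  # prints the yellow "WAR:" line seen in the log
}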