Installing kuttl
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64
KUBECONFIG file is: /tmp/kubeconfig-3107041029
for suite in elasticsearch examples generate miscellaneous sidecar streaming ui upgrade; do \
	make run-e2e-tests-$suite ; \
done
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=elasticsearch
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/elasticsearch.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-elasticsearch
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
SKIP_ES_EXTERNAL=true \
KAFKA_VERSION=0.32.0 \
SKIP_KAFKA=false \
./tests/e2e/elasticsearch/render.sh
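For context, install-kuttl.sh above pins kuttl v0.15.0 and is a no-op when the binary already exists (a later invocation in this log just prints "kubectl-kuttl 0.15.0 is installed already"). A minimal sketch of that install pattern, assuming a simple retry loop and an executable check; the real script's checks may differ:

install_kuttl() {
    local version=0.15.0
    local bin=/tmp/jaeger-tests/bin/kubectl-kuttl
    echo "Installing kuttl"
    if [ -x "$bin" ]; then
        echo "kubectl-kuttl $version is installed already"
        return 0
    fi
    # retry the download a few times before giving up (retry count illustrative)
    for try in 0 1 2; do
        echo "Try $try..."
        curl -sLo "$bin" "https://github.com/kudobuilder/kuttl/releases/download/v${version}/kubectl-kuttl_${version}_linux_x86_64" && break
    done
    chmod +x "$bin"
}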
+++ kubectl get clusterversion
++ output='NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.15.0-0.nightly-2023-12-02-123536   True        False         10m     Cluster version is 4.15.0-0.nightly-2023-12-02-123536'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.15.0-0.nightly-2023-12-02-123536   True        False         10m     Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 0.32.0 ']'
++ version_le 0.32.0 0.25.0
+++ echo 0.32.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 0.32.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
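The version_le trace above is the whole comparison trick: feed both version strings through sort -V and test whether the first argument comes out as the minimum. Reconstructed from the trace (requires a sort that supports -V, e.g. GNU coreutils):

version_le() {
    # true when $1 <= $2 in version order: $1 must sort first
    test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
}
# Here 0.25.0 sorts first, so version_le 0.32.0 0.25.0 is false and the
# script falls through to KAFKA_USE_CUSTOM_PODSET=true for Strimzi 0.32.0.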
+++ dirname ./tests/e2e/elasticsearch/render.sh
++ export SUITE_DIR=./tests/e2e/elasticsearch
++ SUITE_DIR=./tests/e2e/elasticsearch
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ start_test es-from-aio-to-production
+ '[' 1 -ne 1 ']'
+ test_name=es-from-aio-to-production
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-from-aio-to-production'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m'
Rendering files for test es-from-aio-to-production
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-from-aio-to-production
+ cd es-from-aio-to-production
+ jaeger_name=my-jaeger
+ render_install_jaeger my-jaeger allInOne 00
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=allInOne
+ test_step=00
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test my-jaeger true 01
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 03
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=03
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml
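Each render_* helper above follows the same pattern: export the per-test values, then let gomplate substitute them while rendering one shared template into a numbered kuttl step file. A sketch of the mechanism with a made-up one-line template (env.Getenv is a documented gomplate function; the real templates under tests/templates/ are not shown in this log):

export JAEGER_NAME=my-jaeger
export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
echo 'url: {{ env.Getenv "JAEGER_QUERY_ENDPOINT" }}/api/traces' > demo.yaml.template
/tmp/jaeger-tests/bin/gomplate -f demo.yaml.template -o ./01-smoke-test.yaml
# ./01-smoke-test.yaml now contains: url: https://my-jaeger-query:443/api/traces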
+ [[ true = true ]]
+ [[ true = true ]]
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml
+ render_smoke_test my-jaeger true 04
+ '[' 3 -ne 3 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ test_step=04
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test es-increasing-replicas
+ '[' 1 -ne 1 ']'
+ test_name=es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-increasing-replicas'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m'
Rendering files for test es-increasing-replicas
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production
+ '[' es-from-aio-to-production '!=' _build ']'
+ cd ..
+ mkdir -p es-increasing-replicas
+ cd es-increasing-replicas
+ jaeger_name=simple-prod
+ '[' true = true ']'
+ jaeger_deployment_mode=production_autoprovisioned
+ render_install_jaeger simple-prod production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ cp ./01-install.yaml ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml
+ cp ./01-assert.yaml ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml
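The es-increasing-replicas steps above use a copy-and-patch idiom: step 02 is derived from the rendered step 01 files with in-place yq v4 edits rather than a second template render, which keeps the install and assert files in sync. The same idiom, wrapped in a hypothetical helper (derive_step is illustrative, not in the repo):

derive_step() {
    local from=$1 to=$2
    shift 2
    cp "./$from" "./$to"
    # apply each yq expression in place on the copied step file
    for expr in "$@"; do
        /tmp/jaeger-tests/bin/yq e -i "$expr" "./$to"
    done
}
derive_step 01-install.yaml 02-install.yaml '.spec.collector.replicas = 2' '.spec.query.replicas = 2'
derive_step 01-assert.yaml 02-assert.yaml '.spec.replicas = 2' '.status.readyReplicas = 2'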
+ render_smoke_test simple-prod true 03
+ '[' 3 -ne 3 ']'
+ jaeger=simple-prod
+ is_secured=true
+ test_step=03
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ cp ./02-install.yaml ./04-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml
+ '[' true = true ']'
+ skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas
+ '[' es-increasing-replicas '!=' _build ']'
+ cd ..
+ rm -rf es-index-cleaner-upstream
+ warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m'
WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true
+ '[' true = true ']'
+ es_index_cleaner -autoprov production_autoprovisioned
+ '[' 2 -ne 2 ']'
+ postfix=-autoprov
+ jaeger_deployment_strategy=production_autoprovisioned
+ start_test es-index-cleaner-autoprov
+ '[' 1 -ne 1 ']'
+ test_name=es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-index-cleaner-autoprov'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m'
Rendering files for test es-index-cleaner-autoprov
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-index-cleaner-autoprov
+ cd es-index-cleaner-autoprov
+ jaeger_name=test-es-index-cleaner-with-prefix
+ cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner
+ secured_es_connection=false
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_managed_es ']'
+ ELASTICSEARCH_URL=https://elasticsearch
+ secured_es_connection=true
+ cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md .
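skip_test, traced above for es-index-cleaner-upstream, is how the render stage drops suites that do not apply to the current environment: step back to _build, delete the rendered directory so kuttl never discovers it, and print the yellow warning. Reconstructed from the trace:

skip_test() {
    local test_name=$1 message=$2
    # back out of the current test directory if we are still inside one
    if [ "$(basename "$(pwd)")" != "_build" ]; then
        cd ..
    fi
    rm -rf "$test_name"             # kuttl only runs directories that exist
    warning "$test_name: $message"  # suite helper that echoes in yellow
}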
+ render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=test-es-index-cleaner-with-prefix
+ JAEGER_NAME=test-es-index-cleaner-with-prefix
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml
+ render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02
+ '[' 6 -ne 6 ']'
+ jaeger=test-es-index-cleaner-with-prefix
+ is_secured=true
+ number_of_spans=5
+ job_number=00
+ ensure_reported_spans=true
+ test_step=02
+ export JAEGER_NAME=test-es-index-cleaner-with-prefix
+ JAEGER_NAME=test-es-index-cleaner-with-prefix
+ export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export DAYS=5
+ DAYS=5
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query
+ JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ sed 's~enabled: false~enabled: true~gi' ./01-install.yaml
+ CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ job_number=00
+ test_step=06
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=test-es-index-cleaner-with-prefix-curator
+ JOB_NUMBER=00
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml
+ JOB_NUMBER=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml
+ '[' true = true ']'
+ get_elasticsearch_openshift_operator_version
+ export ESO_OPERATOR_VERSION
+ '[' true = true ']'
++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
+ properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}'
+ '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}' ']'
++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}'
++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
+ ESO_OPERATOR_VERSION=5.8.0
++ version_ge 5.8.0 5.4
+++ echo 5.8.0 5.4
+++ tr ' ' '\n'
+++ sort -rV
+++ head -n 1
++ test 5.8.0 == 5.8.0
+ '[' -n '' ']'
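The version probe above reads the OLM properties annotation off the elasticsearch-operator pod and extracts the packaged version from the embedded JSON with yq (yq v4 parses JSON as a YAML superset). Reconstructed from the trace:

get_elasticsearch_openshift_operator_version() {
    local properties
    # OLM stamps the operator pod with a JSON blob of package properties
    properties=$(kubectl get pods -l name=elasticsearch-operator --all-namespaces \
        '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}')
    # select the elasticsearch-operator package entry and pull its version
    ESO_OPERATOR_VERSION=$(echo "$properties" | /tmp/jaeger-tests/bin/yq e -P \
        '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version')
    export ESO_OPERATOR_VERSION
}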
+ skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-index-cleaner-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov
+ '[' es-index-cleaner-autoprov '!=' _build ']'
+ cd ..
+ rm -rf es-index-cleaner-managed
+ warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ start_test es-multiinstance
+ '[' 1 -ne 1 ']'
+ test_name=es-multiinstance
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-multiinstance'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m'
Rendering files for test es-multiinstance
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-multiinstance
+ cd es-multiinstance
+ jaeger_name=instance-1
+ render_install_jaeger instance-1 production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=instance-1
+ JAEGER_NAME=instance-1
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml
+ '[' true = true ']'
+ skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-upstream
+ message='SKIP_ES_EXTERNAL is true'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance
+ '[' es-multiinstance '!=' _build ']'
+ cd ..
+ rm -rf es-rollover-upstream
+ warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m'
WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true
+ '[' true = true ']'
+ es_rollover -autoprov production_autoprovisioned
+ '[' 2 -ne 2 ']'
+ postfix=-autoprov
+ jaeger_deployment_strategy=production_autoprovisioned
+ start_test es-rollover-autoprov
+ '[' 1 -ne 1 ']'
+ test_name=es-rollover-autoprov
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-rollover-autoprov'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m'
Rendering files for test es-rollover-autoprov
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-rollover-autoprov
+ cd es-rollover-autoprov
+ cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md .
+ jaeger_name=my-jaeger
+ secured_es_connection=false
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_managed_es ']'
+ ELASTICSEARCH_URL=https://elasticsearch
+ secured_es_connection=true
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_report_spans my-jaeger true 2 00 true 02
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=00
+ ensure_reported_spans=true
+ test_step=02
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ job_number=00
+ test_step=03
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=00
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml
+ JOB_NUMBER=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ job_number=01
+ test_step=04
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=01
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml
+ JOB_NUMBER=01
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml
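escape_command, which runs before every check-indices render above, just doubles each backslash with sed so regex patterns like \d{4} survive one more round of expansion when CMD_PARAMETERS is substituted into the job template. Reconstructed from the trace:

escape_command() {
    # '\d{4}' becomes '\\d{4}' so the rendered YAML carries the regex intact
    CMD_PARAMETERS=$(echo "$1" | sed 's/\\/\\\\/g')
    export CMD_PARAMETERS
}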
+ render_report_spans my-jaeger true 2 02 true 06
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=02
+ ensure_reported_spans=true
+ test_step=06
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=02
+ JOB_NUMBER=02
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ job_number=02
+ test_step=07
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=02
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml
+ JOB_NUMBER=02
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml
+ render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ job_number=03
+ test_step=08
+ escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=03
+ CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml
+ JOB_NUMBER=03
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ job_number=04
+ test_step=09
+ escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=04
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml
+ JOB_NUMBER=04
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml
+ render_report_spans my-jaeger true 2 03 true 10
+ '[' 6 -ne 6 ']'
+ jaeger=my-jaeger
+ is_secured=true
+ number_of_spans=2
+ job_number=03
+ ensure_reported_spans=true
+ test_step=10
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268
+ export JOB_NUMBER=03
+ JOB_NUMBER=03
+ export DAYS=2
+ DAYS=2
+ '[' true = true ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template
+ '[' true = true ']'
+ export ENSURE_REPORTED_SPANS=true
+ ENSURE_REPORTED_SPANS=true
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query
+ params=
+ '[' true = true ']'
+ '[' true = true ']'
+ '[' '' '!=' allInOne ']'
+ params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml
+ unset JAEGER_COLLECTOR_ENDPOINT
+ unset JAEGER_QUERY_ENDPOINT
+ unset JOB_NUMBER
+ unset DAYS
+ unset ENSURE_REPORTED_SPANS
+ CRONJOB_NAME=my-jaeger-es-rollover
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'','
+ job_number=05
+ test_step=11
+ escape_command ''\''--name'\'', '\''jaeger-span-000002'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-000002'\'','
++ echo ''\''--name'\'', '\''jaeger-span-000002'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=05
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml
+ JOB_NUMBER=05
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml
+ render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12
+ '[' 4 -ne 4 ']'
+ secured=true
+ cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ job_number=06
+ test_step=12
+ escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ '[' 1 -ne 1 ']'
+ command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
++ sed 's/\\/\\\\/g'
+ export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ mount_secret=
+ '[' true = true ']'
+ '[' true = true ']'
+ mount_secret=my-jaeger-curator
+ JOB_NUMBER=06
+ CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'','
+ MOUNT_SECRET=my-jaeger-curator
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml
+ JOB_NUMBER=06
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml
+ '[' true = true ']'
+ get_elasticsearch_openshift_operator_version
+ export ESO_OPERATOR_VERSION
+ '[' true = true ']'
++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
+ properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}'
+ '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}' ']'
++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.15},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.0"}}]}'
++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version'
+ ESO_OPERATOR_VERSION=5.8.0
++ version_ge 5.8.0 5.4
+++ echo 5.8.0 5.4
+++ tr ' ' '\n'
+++ sort -rV
+++ head -n 1
++ test 5.8.0 == 5.8.0
+ '[' -n '' ']'
+ skip_test es-rollover-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 2 -ne 2 ']'
+ test_name=es-rollover-managed
+ message='Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov
+ '[' es-rollover-autoprov '!=' _build ']'
+ cd ..
+ rm -rf es-rollover-managed
+ warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m'
WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4
+ '[' true = true ']'
+ skip_test es-spark-dependencies 'This test is not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=es-spark-dependencies
+ message='This test is not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ rm -rf es-spark-dependencies
+ warning 'es-spark-dependencies: This test is not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m'
WAR: es-spark-dependencies: This test is not supported in OpenShift
+ [[ true = true ]]
+ [[ false = false ]]
+ start_test es-streaming-autoprovisioned
+ '[' 1 -ne 1 ']'
+ test_name=es-streaming-autoprovisioned
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test es-streaming-autoprovisioned'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test es-streaming-autoprovisioned\e[0m'
Rendering files for test es-streaming-autoprovisioned
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build
+ '[' _build '!=' _build ']'
+ mkdir -p es-streaming-autoprovisioned
+ cd es-streaming-autoprovisioned
+ jaeger_name=auto-provisioned
+ render_assert_kafka true auto-provisioned 00
+ '[' 3 -ne 3 ']'
+ autoprovisioned=true
+ cluster_name=auto-provisioned
+ test_step=00
+ '[' true = true ']'
+ is_kafka_minimal_enabled
+ namespaces=(observability openshift-operators openshift-distributed-tracing)
+ for i in "${namespaces[@]}"
++ kubectl get pods -n observability -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=
+ '[' '' == true ']'
+ for i in "${namespaces[@]}"
++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml
++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value'
+ enabled=true
+ '[' true == true ']'
+ return 0
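is_kafka_minimal_enabled above probes the namespaces a jaeger-operator pod may live in and checks its KAFKA-PROVISIONING-MINIMAL environment variable; on this cluster the pod in openshift-distributed-tracing has it set to true, so a single-replica Kafka is asserted. Reconstructed from the trace:

is_kafka_minimal_enabled() {
    local namespaces=(observability openshift-operators openshift-distributed-tracing)
    for i in "${namespaces[@]}"; do
        # pull the env var value out of the operator pod spec, if the pod exists
        enabled=$(kubectl get pods -n "$i" -l name=jaeger-operator -o yaml \
            | /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value')
        if [ "$enabled" == true ]; then
            return 0
        fi
    done
    return 1
}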
+ replicas=1
+ CLUSTER_NAME=auto-provisioned
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml
++ expr 00 + 1
+ CLUSTER_NAME=auto-provisioned
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml
++ expr 00 + 2
+ CLUSTER_NAME=auto-provisioned
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml
+ render_smoke_test auto-provisioned true 04
+ '[' 3 -ne 3 ']'
+ jaeger=auto-provisioned
+ is_secured=true
+ test_step=04
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443
+ JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268
+ export JAEGER_NAME=auto-provisioned
+ JAEGER_NAME=auto-provisioned
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running elasticsearch E2E tests'
Running elasticsearch E2E tests
+ cd tests/e2e/elasticsearch/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3107041029
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 600 seconds for each step
    harness.go:372: testsuite: . has 8 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN   kuttl/harness/es-from-aio-to-production
=== PAUSE kuttl/harness/es-from-aio-to-production
=== RUN   kuttl/harness/es-increasing-replicas
=== PAUSE kuttl/harness/es-increasing-replicas
=== RUN   kuttl/harness/es-index-cleaner-autoprov
=== PAUSE kuttl/harness/es-index-cleaner-autoprov
=== RUN   kuttl/harness/es-multiinstance
=== PAUSE kuttl/harness/es-multiinstance
=== RUN   kuttl/harness/es-rollover-autoprov
=== PAUSE kuttl/harness/es-rollover-autoprov
=== RUN   kuttl/harness/es-simple-prod
=== PAUSE kuttl/harness/es-simple-prod
=== RUN   kuttl/harness/es-streaming-autoprovisioned
=== PAUSE kuttl/harness/es-streaming-autoprovisioned
=== CONT  kuttl/harness/artifacts
    logger.go:42: 06:48:22 | artifacts | Creating namespace: kuttl-test-careful-horse
    logger.go:42: 06:48:22 | artifacts | artifacts events from ns kuttl-test-careful-horse:
    logger.go:42: 06:48:22 | artifacts | Deleting namespace: kuttl-test-careful-horse
=== CONT  kuttl/harness/es-multiinstance
    logger.go:42: 06:48:28 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
    logger.go:42: 06:48:28 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
    logger.go:42: 06:48:28 | es-multiinstance | Creating namespace: kuttl-test-suited-sheep
    logger.go:42: 06:48:29 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace
    logger.go:42: 06:48:29 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true]
    logger.go:42: 06:48:29 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace
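The harness parameters visible above (one testsuite rooted at '.', a 600-second timeout per step, and README.md plus the un-rendered *.template ignored because step files must match ^(\d+)-name.yaml) are all driven by the kuttl-test.yaml rendered at the start of this log. A plausible shape for that rendered file, inferred from this output rather than copied from the repo:

cat > kuttl-test.yaml <<'EOF'
apiVersion: kuttl.dev/v1beta1
kind: TestSuite
testDirs:
  - .
timeout: 600
artifactsDir: ./artifacts
EOF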
    logger.go:42: 06:48:29 | es-multiinstance/1-install | starting test step 1-install
    logger.go:42: 06:48:29 | es-multiinstance/1-install | Jaeger:kuttl-test-suited-sheep/instance-1 created
    logger.go:42: 06:49:32 | es-multiinstance/1-install | test step completed 1-install
    logger.go:42: 06:49:32 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace
    logger.go:42: 06:49:32 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test]
    logger.go:42: 06:49:32 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created
    logger.go:42: 06:49:32 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace
    logger.go:42: 06:49:32 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance
    logger.go:42: 06:49:32 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test]
    logger.go:42: 06:49:34 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created
    logger.go:42: 06:49:34 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000]
    logger.go:42: 06:50:22 | es-multiinstance/3-create-second-instance | assert is valid
    logger.go:42: 06:50:22 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance
    logger.go:42: 06:50:22 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets
    logger.go:42: 06:50:22 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1]
    logger.go:42: 06:50:23 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2]
    logger.go:42: 06:50:23 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0]
    logger.go:42: 06:50:23 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets
    logger.go:42: 06:50:23 | es-multiinstance/5-delete | starting test step 5-delete
    logger.go:42: 06:50:23 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false]
    logger.go:42: 06:50:23 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted
    logger.go:42: 06:50:23 | es-multiinstance/5-delete | test step completed 5-delete
    logger.go:42: 06:50:23 | es-multiinstance | es-multiinstance events from ns kuttl-test-suited-sheep:
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:35 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4fd SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t replicaset-controller
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:35 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t Binding Scheduled Successfully assigned kuttl-test-suited-sheep/elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:35 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestsuitedsheepinstance1-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4fd to 1 deployment-controller
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t AddedInterface Add eth0 [10.129.2.18/23] from ovn-kubernetes
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:36 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{elasticsearch} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" in 8.618s (8.618s including waiting) kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" in 4.505s (4.505s including waiting) kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{proxy} Created Created container proxy kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{proxy} Started Started container proxy kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:48:56 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:01 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsuitedsheepinstance1-1-57ccc7f4sfc7t.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:11 +0000 UTC Normal Pod instance-1-collector-899cdd544-79x87 Binding Scheduled Successfully assigned kuttl-test-suited-sheep/instance-1-collector-899cdd544-79x87 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:11 +0000 UTC Normal ReplicaSet.apps instance-1-collector-899cdd544 SuccessfulCreate Created pod: instance-1-collector-899cdd544-79x87 replicaset-controller
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:11 +0000 UTC Normal Deployment.apps instance-1-collector ScalingReplicaSet Scaled up replica set instance-1-collector-899cdd544 to 1 deployment-controller
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:12 +0000 UTC Normal Pod instance-1-collector-899cdd544-79x87 AddedInterface Add eth0 [10.128.2.23/23] from ovn-kubernetes
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:12 +0000 UTC Normal Pod instance-1-collector-899cdd544-79x87.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:12 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89 Binding Scheduled Successfully assigned kuttl-test-suited-sheep/instance-1-query-684f9c9975-n6x89 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:12 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89 AddedInterface Add eth0 [10.128.2.24/23] from ovn-kubernetes
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:12 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:12 +0000 UTC Normal ReplicaSet.apps instance-1-query-684f9c9975 SuccessfulCreate Created pod: instance-1-query-684f9c9975-n6x89 replicaset-controller
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:12 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-684f9c9975 to 1 deployment-controller
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-collector-899cdd544-79x87.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" in 11.84s (11.84s including waiting) kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-collector-899cdd544-79x87.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-collector-899cdd544-79x87.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" in 11.779s (11.779s including waiting) kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
    logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:24 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" kubelet logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:30 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" in 5.648s (5.648s including waiting) kubelet logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:30 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:30 +0000 UTC Normal Pod instance-1-query-684f9c9975-n6x89.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:48 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:49:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:50:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:50:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod instance-1-collector-899cdd544-79x87 horizontal-pod-autoscaler logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:50:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid 
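The FailedGetResourceMetric and FailedComputeMetricsReplicas warnings here are a benign pattern in these runs: the autoscaler cannot compute utilization because the jaeger-collector container declares no CPU or memory requests, exactly as the "missing request for memory in container jaeger-collector" message says. A minimal sketch of how requests could be declared on the Jaeger CR so the HPA has a baseline (illustrative values, not what this test applies):

  apiVersion: jaegertracing.io/v1
  kind: Jaeger
  metadata:
    name: instance-1
  spec:
    collector:
      resources:
        requests:
          cpu: 100m      # with requests set, cpu utilization becomes computable
          memory: 128Mi  # silences the "missing request for memory" warning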
logger.go:42: 06:50:23 | es-multiinstance | 2023-12-04 06:50:19 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 06:50:23 | es-multiinstance | Deleting namespace: kuttl-test-suited-sheep
=== CONT kuttl/harness/es-streaming-autoprovisioned
logger.go:42: 06:50:30 | es-streaming-autoprovisioned | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:50:30 | es-streaming-autoprovisioned | Creating namespace: kuttl-test-exciting-jay
logger.go:42: 06:50:30 | es-streaming-autoprovisioned/0-install | starting test step 0-install
logger.go:42: 06:50:30 | es-streaming-autoprovisioned/0-install | Jaeger:kuttl-test-exciting-jay/auto-provisioned created
logger.go:42: 06:51:49 | es-streaming-autoprovisioned/0-install | test step completed 0-install
logger.go:42: 06:51:49 | es-streaming-autoprovisioned/1- | starting test step 1-
logger.go:42: 06:52:19 | es-streaming-autoprovisioned/1- | test step completed 1-
logger.go:42: 06:52:19 | es-streaming-autoprovisioned/2- | starting test step 2-
logger.go:42: 06:52:43 | es-streaming-autoprovisioned/2- | test step completed 2-
logger.go:42: 06:52:43 | es-streaming-autoprovisioned/3- | starting test step 3-
logger.go:42: 06:52:54 | es-streaming-autoprovisioned/3- | test step completed 3-
logger.go:42: 06:52:54 | es-streaming-autoprovisioned/4-smoke-test | starting test step 4-smoke-test
logger.go:42: 06:52:54 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provisioned /dev/null]
logger.go:42: 06:52:55 | es-streaming-autoprovisioned/4-smoke-test | Warning: resource jaegers/auto-provisioned is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
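The numbered step names come straight from kuttl's file-name convention, which is also why README.md is skipped: only files matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ are treated as test steps. The "running command" lines are kuttl executing the commands listed in a step file; a hypothetical step in that shape (the repo's actual 4-smoke-test step may differ):

  apiVersion: kuttl.dev/v1beta1
  kind: TestStep
  commands:
    # each entry surfaces in the log as: running command: [sh -c ...]
    - script: SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provisioned /dev/null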
logger.go:42: 06:53:03 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://auto-provisioned-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provisioned-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 06:53:04 | es-streaming-autoprovisioned/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 06:53:06 | es-streaming-autoprovisioned/4-smoke-test | job.batch/report-span created
logger.go:42: 06:53:06 | es-streaming-autoprovisioned/4-smoke-test | job.batch/check-span created
logger.go:42: 06:53:20 | es-streaming-autoprovisioned/4-smoke-test | test step completed 4-smoke-test
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | es-streaming-autoprovisioned events from ns kuttl-test-exciting-jay:
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77c579d74d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc replicaset-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc Binding Scheduled Successfully assigned kuttl-test-exciting-jay/elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc AddedInterface Add eth0 [10.129.2.19/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:37 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77c579d74d to 1 deployment-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:47 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:50:52 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestexcitingjayautoprovisioned-1-77zkksc.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:05 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:05 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:06 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:06 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-exciting-jay/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:10 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-4dc3c46f-26f1-46e6-ab8e-ceb78b60e57a ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:11 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-exciting-jay/auto-provisioned-zookeeper-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:13 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-4dc3c46f-26f1-46e6-ab8e-ceb78b60e57a" attachdetach-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:19 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:19 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:26 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" in 7.479s (7.479s including waiting) kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:26 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:26 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:50 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:50 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
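The PVC sequence here (WaitForFirstConsumer, then ExternalProvisioning once a pod is scheduled) is the expected flow for a CSI storage class with delayed binding. A sketch of the kind of StorageClass that produces it; the class name is an assumption, since the log only shows the provisioner:

  apiVersion: storage.k8s.io/v1
  kind: StorageClass
  metadata:
    name: gp3-csi                         # assumed name; not visible in the log
  provisioner: ebs.csi.aws.com            # matches the provisioner in the events
  volumeBindingMode: WaitForFirstConsumer # bind only after a consuming pod is scheduled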
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:50 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:50 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-exciting-jay/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:55 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-exciting-jay/auto-provisioned-kafka-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:55 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-aad0206c-e7d6-46e4-baf0-33906bca5c04 ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:58 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-aad0206c-e7d6-46e4-baf0-33906bca5c04" attachdetach-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:59 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:59 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:59 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:51:59 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq Binding Scheduled Successfully assigned kuttl-test-exciting-jay/auto-provisioned-entity-operator-7cdfc47bf5-frhpq to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq AddedInterface Add eth0 [10.128.2.30/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq.spec.containers{topic-operator} Started Started container topic-operator kubelet
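Everything provisioned in this namespace so far (Zookeeper, Kafka, the entity operator, and next the collector, ingester, and query pods) is driven by the single Jaeger CR created in step 0-install: with the streaming strategy and no Kafka configured, the operator auto-provisions one through Strimzi. A plausible shape for that CR; the test's actual manifest is not shown in this log:

  apiVersion: jaegertracing.io/v1
  kind: Jaeger
  metadata:
    name: auto-provisioned
  spec:
    strategy: streaming     # adds the ingester and a Kafka-backed pipeline
    storage:
      type: elasticsearch   # served by the elasticsearch-cdm-* pods above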
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Pod auto-provisioned-entity-operator-7cdfc47bf5-frhpq.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-7cdfc47bf5 SuccessfulCreate Created pod: auto-provisioned-entity-operator-7cdfc47bf5-frhpq replicaset-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:22 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-7cdfc47bf5 to 1 deployment-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Pod auto-provisioned-collector-6896655ddd-t8ll2 Binding Scheduled Successfully assigned kuttl-test-exciting-jay/auto-provisioned-collector-6896655ddd-t8ll2 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Warning Pod auto-provisioned-collector-6896655ddd-t8ll2 FailedMount MountVolume.SetUp failed for volume "auto-provisioned-collector-tls-config-volume" : secret "auto-provisioned-collector-headless-tls" not found kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-6896655ddd SuccessfulCreate Created pod: auto-provisioned-collector-6896655ddd-t8ll2 replicaset-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-6896655ddd to 1 deployment-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Pod auto-provisioned-ingester-6679849774-f54dz Binding Scheduled Successfully assigned kuttl-test-exciting-jay/auto-provisioned-ingester-6679849774-f54dz to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Pod auto-provisioned-ingester-6679849774-f54dz AddedInterface Add eth0 [10.131.0.23/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Pod auto-provisioned-ingester-6679849774-f54dz.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-6679849774 SuccessfulCreate Created pod: auto-provisioned-ingester-6679849774-f54dz replicaset-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-6679849774 to 1 deployment-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8 Binding Scheduled Successfully assigned kuttl-test-exciting-jay/auto-provisioned-query-5dcdd46655-6mnn8 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8 AddedInterface Add eth0 [10.131.0.24/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-5dcdd46655 SuccessfulCreate Created pod: auto-provisioned-query-5dcdd46655-6mnn8 replicaset-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:45 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-5dcdd46655 to 1 deployment-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:46 +0000 UTC Normal Pod auto-provisioned-collector-6896655ddd-t8ll2 AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:46 +0000 UTC Normal Pod auto-provisioned-collector-6896655ddd-t8ll2.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:46 +0000 UTC Normal Pod auto-provisioned-collector-6896655ddd-t8ll2.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:46 +0000 UTC Normal Pod auto-provisioned-collector-6896655ddd-t8ll2.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:50 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" in 4.85s (4.85s including waiting) kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:50 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:50 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:50 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:50 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:50 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:50 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:51 +0000 UTC Normal Pod auto-provisioned-ingester-6679849774-f54dz.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" in 5.612s (5.612s including waiting) kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:51 +0000 UTC Normal Pod auto-provisioned-ingester-6679849774-f54dz.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:51 +0000 UTC Normal Pod auto-provisioned-ingester-6679849774-f54dz.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:52 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" in 1.657s (1.657s including waiting) kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:52 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:52 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:58 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:58 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:58 +0000 UTC Normal Pod auto-provisioned-query-5dcdd46655-6mnn8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:58 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-5dcdd46655 SuccessfulDelete Deleted pod: auto-provisioned-query-5dcdd46655-6mnn8 replicaset-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:58 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled down replica set auto-provisioned-query-5dcdd46655 to 0 from 1 deployment-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg Binding Scheduled Successfully assigned kuttl-test-exciting-jay/auto-provisioned-query-7fb6bcd889-rjrkg to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg AddedInterface Add eth0 [10.131.0.25/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Pod auto-provisioned-query-7fb6bcd889-rjrkg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-7fb6bcd889 SuccessfulCreate Created pod: auto-provisioned-query-7fb6bcd889-rjrkg replicaset-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:52:59 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-7fb6bcd889 to 1 deployment-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:00 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:06 +0000 UTC Normal Pod check-span-h8vn8 Binding Scheduled Successfully assigned kuttl-test-exciting-jay/check-span-h8vn8 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:06 +0000 UTC Normal Pod check-span-h8vn8 AddedInterface Add eth0 [10.128.2.32/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:06 +0000 UTC Normal Pod check-span-h8vn8.spec.containers{asserts-container} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:06 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-h8vn8 job-controller
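The smoke test itself is the report-span/check-span Job pair created in step 4-smoke-test: report-span writes spans to the collector, and check-span polls the query API until they show up. A rough sketch of the rendered Job, assuming the endpoints from the gomplate invocation are passed as environment variables (the real template is tests/templates/smoke-test.yaml.template and may wire them differently):

  apiVersion: batch/v1
  kind: Job
  metadata:
    name: report-span
  spec:
    template:
      spec:
        restartPolicy: Never
        containers:
          - name: report-span
            image: registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3
            env:
              - name: JAEGER_COLLECTOR_ENDPOINT   # value taken from the log above
                value: http://auto-provisioned-collector-headless:14268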
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:06 +0000 UTC Normal Pod report-span-9955p Binding Scheduled Successfully assigned kuttl-test-exciting-jay/report-span-9955p to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:06 +0000 UTC Normal Pod report-span-9955p AddedInterface Add eth0 [10.131.0.26/23] from ovn-kubernetes
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:06 +0000 UTC Normal Pod report-span-9955p.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:06 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9955p job-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:09 +0000 UTC Normal Pod check-span-h8vn8.spec.containers{asserts-container} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" in 2.691s (2.691s including waiting) kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:09 +0000 UTC Normal Pod check-span-h8vn8.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:09 +0000 UTC Normal Pod check-span-h8vn8.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:10 +0000 UTC Normal Pod report-span-9955p.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" in 4.007s (4.007s including waiting) kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:10 +0000 UTC Normal Pod report-span-9955p.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:10 +0000 UTC Normal Pod report-span-9955p.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | 2023-12-04 06:53:20 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 06:53:20 | es-streaming-autoprovisioned | Deleting namespace: kuttl-test-exciting-jay
=== CONT kuttl/harness/es-simple-prod
logger.go:42: 06:53:56 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:53:56 | es-simple-prod | Creating namespace: kuttl-test-nice-ape
logger.go:42: 06:53:56 | es-simple-prod | es-simple-prod events from ns kuttl-test-nice-ape:
logger.go:42: 06:53:56 | es-simple-prod | Deleting namespace: kuttl-test-nice-ape
=== CONT kuttl/harness/es-rollover-autoprov
logger.go:42: 06:54:02 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:54:02 | es-rollover-autoprov | Creating namespace: kuttl-test-cuddly-crab
logger.go:42: 06:54:02 | es-rollover-autoprov/1-install | starting test step 1-install
logger.go:42: 06:54:02 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-cuddly-crab/my-jaeger created
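The my-jaeger instance created here uses the operator's production strategy with self-provisioned Elasticsearch, which is what spawns the elasticsearch-cdm-kuttltestcuddlycrabmyjaeger pods in the events below. A minimal sketch of such a CR (node count inferred from the single cdm deployment; the test's manifest may set more options):

  apiVersion: jaegertracing.io/v1
  kind: Jaeger
  metadata:
    name: my-jaeger
  spec:
    strategy: production
    storage:
      type: elasticsearch
      elasticsearch:
        nodeCount: 1   # matches the single elasticsearch-cdm-* deployment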
logger.go:42: 06:54:37 | es-rollover-autoprov/1-install | test step completed 1-install
logger.go:42: 06:54:37 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 06:54:37 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 06:54:39 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 06:54:45 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 06:54:45 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 06:54:46 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 06:55:09 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans
logger.go:42: 06:55:09 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices
logger.go:42: 06:55:09 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-cuddly-crab/00-check-indices created
logger.go:42: 06:55:14 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices
logger.go:42: 06:55:14 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices
logger.go:42: 06:55:14 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-cuddly-crab/01-check-indices created
logger.go:42: 06:55:18 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices
logger.go:42: 06:55:18 | es-rollover-autoprov/5-install | starting test step 5-install
logger.go:42: 06:55:18 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-cuddly-crab/my-jaeger updated
logger.go:42: 06:55:26 | es-rollover-autoprov/5-install | test step completed 5-install
logger.go:42: 06:55:26 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans
logger.go:42: 06:55:26 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 06:55:34 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml]
logger.go:42: 06:55:34 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE]
logger.go:42: 06:55:35 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created
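Step 5-install updates the CR, evidently to enable index rollover: the my-jaeger-es-rollover-create-mapping Job appears immediately afterwards in the events, and step 11-check-indices waits on the my-jaeger-es-rollover CronJob. A hedged sketch of the relevant stanza (schedule and TTL values are illustrative, not the test's):

  apiVersion: jaegertracing.io/v1
  kind: Jaeger
  metadata:
    name: my-jaeger
  spec:
    storage:
      type: elasticsearch
      esRollover:
        schedule: "*/2 * * * *"   # creates the my-jaeger-es-rollover CronJob
        readTTL: 2h               # illustrative retention for read aliases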
logger.go:42: 06:55:59 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans logger.go:42: 06:55:59 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices logger.go:42: 06:55:59 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-cuddly-crab/02-check-indices created logger.go:42: 06:56:02 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices logger.go:42: 06:56:02 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices logger.go:42: 06:56:03 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-cuddly-crab/03-check-indices created logger.go:42: 06:56:06 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices logger.go:42: 06:56:06 | es-rollover-autoprov/9-check-indices | starting test step 9-check-indices logger.go:42: 06:56:06 | es-rollover-autoprov/9-check-indices | Job:kuttl-test-cuddly-crab/04-check-indices created logger.go:42: 06:56:10 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices logger.go:42: 06:56:10 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans logger.go:42: 06:56:10 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 06:56:17 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml] logger.go:42: 06:56:18 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE] logger.go:42: 06:56:18 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created logger.go:42: 06:56:42 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans logger.go:42: 06:56:42 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices logger.go:42: 06:56:42 | es-rollover-autoprov/11-check-indices | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE] logger.go:42: 06:56:51 | es-rollover-autoprov/11-check-indices | time="2023-12-04T06:56:51Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists" logger.go:42: 06:56:51 | es-rollover-autoprov/11-check-indices | time="2023-12-04T06:56:51Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 06:56:51 | es-rollover-autoprov/11-check-indices | time="2023-12-04T06:56:51Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully" logger.go:42: 06:56:51 | es-rollover-autoprov/11-check-indices | time="2023-12-04T06:56:51Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob" logger.go:42: 06:56:51 | es-rollover-autoprov/11-check-indices | time="2023-12-04T06:56:51Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:56:51 | es-rollover-autoprov/11-check-indices | time="2023-12-04T06:56:51Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:57:01 | es-rollover-autoprov/11-check-indices | 
time="2023-12-04T06:57:01Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 06:57:11 | es-rollover-autoprov/11-check-indices | time="2023-12-04T06:57:11Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after my-jaeger-es-rollover 20.096647286s" logger.go:42: 06:57:11 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-cuddly-crab/05-check-indices created logger.go:42: 06:57:15 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices logger.go:42: 06:57:15 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices logger.go:42: 06:57:16 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-cuddly-crab/06-check-indices created logger.go:42: 06:57:19 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices logger.go:42: 06:57:19 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-cuddly-crab: logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx AddedInterface Add eth0 [10.129.2.20/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:08 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx replicaset-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:08 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb to 1 deployment-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:09 +0000 
UTC Normal Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:18 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:24 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcuddlycrabmyjaeger-1-7d49fbbcb-j6wsx.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hwg6l Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-collector-5489f5bd9b-hwg6l to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-hwg6l replicaset-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-query-74db8f4dfd-l224n to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{jaeger-agent} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-74db8f4dfd SuccessfulCreate Created pod: my-jaeger-query-74db8f4dfd-l224n replicaset-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:35 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-74db8f4dfd to 1 deployment-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:36 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hwg6l AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:36 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hwg6l.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:36 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hwg6l.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:36 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hwg6l.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:36 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:36 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:40 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:40 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:40 +0000 UTC Normal Pod my-jaeger-query-74db8f4dfd-l224n.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:40 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-74db8f4dfd SuccessfulDelete Deleted pod: my-jaeger-query-74db8f4dfd-l224n replicaset-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:40 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-74db8f4dfd to 0 from 1 deployment-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:41 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8 Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-query-dc89659b6-zt7n8 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:41 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8 AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:41 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-dc89659b6 
SuccessfulCreate Created pod: my-jaeger-query-dc89659b6-zt7n8 replicaset-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:41 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-dc89659b6 to 1 deployment-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:42 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:42 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:42 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:42 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:42 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:42 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:42 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:42 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:42 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:46 +0000 UTC Normal Pod 00-report-span-bt2rp Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/00-report-span-bt2rp to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:46 +0000 UTC Normal Pod 00-report-span-bt2rp AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:46 +0000 UTC Normal Pod 00-report-span-bt2rp.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:46 +0000 UTC Normal Pod 00-report-span-bt2rp.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:46 +0000 UTC Normal Pod 
00-report-span-bt2rp.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:46 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-bt2rp job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:54:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:09 +0000 UTC Normal Pod 00-check-indices-49tqv Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/00-check-indices-49tqv to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:09 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-49tqv job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:09 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:10 +0000 UTC Normal Pod 00-check-indices-49tqv AddedInterface Add eth0 [10.131.0.28/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:10 +0000 UTC Normal Pod 00-check-indices-49tqv.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:10 +0000 UTC Normal Pod 00-check-indices-49tqv.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:10 +0000 UTC Normal Pod 00-check-indices-49tqv.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:13 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:14 +0000 UTC Normal Pod 01-check-indices-hfzpw Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/01-check-indices-hfzpw to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:14 +0000 UTC Normal Pod 01-check-indices-hfzpw AddedInterface Add eth0 [10.131.0.29/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:14 +0000 UTC Normal Pod 01-check-indices-hfzpw.spec.containers{asserts-container} Pulled 
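The three HorizontalPodAutoscaler warnings above are the usual transient noise right after a rollout: metrics-server has no samples yet for the freshly created collector pod, and utilization-based scaling also needs resource requests on the jaeger-collector container (the later "missing request for memory" warning is the same root cause). A minimal sketch of how such requests could be declared, assuming the standard Jaeger CR resources field; the instance name and namespace are placeholders, not taken from this run:

# Illustrative only: declare requests so the HPA can compute utilization
# once metrics-server has samples for the collector pod.
kubectl patch jaeger my-jaeger -n <namespace> --type=merge \
  -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'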
Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:14 +0000 UTC Normal Pod 01-check-indices-hfzpw.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:14 +0000 UTC Normal Pod 01-check-indices-hfzpw.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:14 +0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-hfzpw job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:17 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:19 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-nvvdf Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-es-rollover-create-mapping-nvvdf to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:19 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-nvvdf AddedInterface Add eth0 [10.131.0.30/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:19 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-nvvdf.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:19 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-nvvdf job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:20 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:20 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-5489f5bd9b-hwg6l horizontal-pod-autoscaler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:20 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:23 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-nvvdf.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" in 3.701s (3.701s including waiting) kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:23 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-nvvdf.spec.containers{my-jaeger-es-rollover-create-mapping} Created 
Created container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:23 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-nvvdf.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:26 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:27 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-hwg6l.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:27 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulDelete Deleted pod: my-jaeger-collector-5489f5bd9b-hwg6l replicaset-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:27 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-5489f5bd9b to 0 from 1 deployment-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:27 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:27 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:27 +0000 UTC Normal Pod my-jaeger-query-dc89659b6-zt7n8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:27 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-dc89659b6 SuccessfulDelete Deleted pod: my-jaeger-query-dc89659b6-zt7n8 replicaset-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:27 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-dc89659b6 to 0 from 1 deployment-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:28 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-prvqc Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-collector-7794fb6d5c-prvqc to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7794fb6d5c SuccessfulCreate Created pod: my-jaeger-collector-7794fb6d5c-prvqc replicaset-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:28 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-7794fb6d5c to 1 deployment-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:28 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-query-6f7659cf8c-vs2st to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6f7659cf8c SuccessfulCreate Created pod: my-jaeger-query-6f7659cf8c-vs2st replicaset-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:28 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set 
my-jaeger-query-6f7659cf8c to 1 deployment-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-prvqc AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-prvqc.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st AddedInterface Add eth0 [10.128.2.36/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:29 +0000 UTC Normal Pod my-jaeger-query-6f7659cf8c-vs2st.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:31 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-prvqc.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" in 1.954s (1.954s including waiting) kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:31 +0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-prvqc.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:31 
+0000 UTC Normal Pod my-jaeger-collector-7794fb6d5c-prvqc.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:35 +0000 UTC Normal Pod 02-report-span-d5fpv Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/02-report-span-d5fpv to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:35 +0000 UTC Normal Pod 02-report-span-d5fpv AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:35 +0000 UTC Normal Pod 02-report-span-d5fpv.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:35 +0000 UTC Normal Pod 02-report-span-d5fpv.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:35 +0000 UTC Normal Pod 02-report-span-d5fpv.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:35 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-d5fpv job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:58 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:59 +0000 UTC Normal Pod 02-check-indices-qt6sc Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/02-check-indices-qt6sc to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:55:59 +0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-qt6sc job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod 02-check-indices-qt6sc AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod 02-check-indices-qt6sc.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod 02-check-indices-qt6sc.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod 02-check-indices-qt6sc.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28361216-l6xbm Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-es-lookback-28361216-l6xbm to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28361216-l6xbm AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod 
my-jaeger-es-lookback-28361216-l6xbm.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28361216-l6xbm.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28361216-l6xbm.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28361216 SuccessfulCreate Created pod: my-jaeger-es-lookback-28361216-l6xbm job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28361216 cronjob-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28361216-5ztvx Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-es-rollover-28361216-5ztvx to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28361216-5ztvx AddedInterface Add eth0 [10.128.2.37/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28361216-5ztvx.spec.containers{my-jaeger-es-rollover} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28361216 SuccessfulCreate Created pod: my-jaeger-es-rollover-28361216-5ztvx job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28361216 cronjob-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:02 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Pod 03-check-indices-mtvcl Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/03-check-indices-mtvcl to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Pod 03-check-indices-mtvcl AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Pod 03-check-indices-mtvcl.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Pod 03-check-indices-mtvcl.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Pod 
03-check-indices-mtvcl.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-mtvcl job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28361216 Completed Job completed job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28361216, status: Complete cronjob-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Pod my-jaeger-es-rollover-28361216-5ztvx.spec.containers{my-jaeger-es-rollover} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" in 2.987s (2.987s including waiting) kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Pod my-jaeger-es-rollover-28361216-5ztvx.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:03 +0000 UTC Normal Pod my-jaeger-es-rollover-28361216-5ztvx.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:05 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:05 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28361216 Completed Job completed job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:05 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28361216, status: Complete cronjob-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:06 +0000 UTC Normal Pod 04-check-indices-ngb4w Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/04-check-indices-ngb4w to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:06 +0000 UTC Normal Pod 04-check-indices-ngb4w AddedInterface Add eth0 [10.131.0.36/23] from ovn-kubernetes logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:06 +0000 UTC Normal Pod 04-check-indices-ngb4w.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:06 +0000 UTC Normal Pod 04-check-indices-ngb4w.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:06 +0000 UTC Normal Pod 04-check-indices-ngb4w.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:06 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-ngb4w job-controller logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:09 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller logger.go:42: 06:57:19 | 
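Each numbered NN-check-indices job above asserts the expected set of rollover indices in Elasticsearch at that point in the test. A hand-run approximation of that kind of check, assuming the in-cluster Elasticsearch service name and the jaeger-span-* index and write-alias naming that the rollover tooling conventionally uses (both hypothetical here):

# Illustrative spot-check: list the rollover indices and see which
# index the write alias currently points at.
curl -s 'http://elasticsearch:9200/_cat/indices/jaeger-span-*?v'
curl -s 'http://elasticsearch:9200/_alias/jaeger-span-write'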
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:18 +0000 UTC Normal Pod 03-report-span-j26vg Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/03-report-span-j26vg to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:18 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-j26vg job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:19 +0000 UTC Normal Pod 03-report-span-j26vg AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:19 +0000 UTC Normal Pod 03-report-span-j26vg.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:19 +0000 UTC Normal Pod 03-report-span-j26vg.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:19 +0000 UTC Normal Pod 03-report-span-j26vg.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:20 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-7794fb6d5c-prvqc horizontal-pod-autoscaler
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:56:41 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28361217-xnp5n Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-es-lookback-28361217-xnp5n to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28361217-xnp5n AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28361217-xnp5n.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28361217-xnp5n.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28361217-xnp5n.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28361217 SuccessfulCreate Created pod: my-jaeger-es-lookback-28361217-xnp5n job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28361217 cronjob-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28361217-bbrs4 Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/my-jaeger-es-rollover-28361217-bbrs4 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28361217-bbrs4 AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28361217-bbrs4.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:edda53eae462f604654d8adc71622101d2f62c9de6ecce4484523341b972723a" already present on machine kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28361217-bbrs4.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28361217-bbrs4.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28361217 SuccessfulCreate Created pod: my-jaeger-es-rollover-28361217-bbrs4 job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28361217 cronjob-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:02 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28361217 Completed Job completed job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:02 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28361217, status: Complete cronjob-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28361217 Completed Job completed job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28361217, status: Complete cronjob-controller
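The numeric suffix on these job names is not random: the CronJob controller names each child job after its scheduled run time, expressed in minutes since the Unix epoch, so the suffixes decode back to the timestamps in the log (28361217 is simply the next minute):

# 28361216 minutes since the epoch, decoded with GNU date:
date -u -d @$((28361216 * 60))   # Mon Dec  4 06:56:00 UTC 2023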
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:11 +0000 UTC Normal Pod 05-check-indices-xq96t Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/05-check-indices-xq96t to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:11 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-xq96t job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:12 +0000 UTC Normal Pod 05-check-indices-xq96t AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:12 +0000 UTC Normal Pod 05-check-indices-xq96t.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:12 +0000 UTC Normal Pod 05-check-indices-xq96t.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:12 +0000 UTC Normal Pod 05-check-indices-xq96t.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:14 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:16 +0000 UTC Normal Pod 06-check-indices-5sk8z Binding Scheduled Successfully assigned kuttl-test-cuddly-crab/06-check-indices-5sk8z to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:16 +0000 UTC Normal Pod 06-check-indices-5sk8z AddedInterface Add eth0 [10.131.0.40/23] from ovn-kubernetes
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:16 +0000 UTC Normal Pod 06-check-indices-5sk8z.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:16 +0000 UTC Normal Pod 06-check-indices-5sk8z.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:16 +0000 UTC Normal Pod 06-check-indices-5sk8z.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:16 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-5sk8z job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | 2023-12-04 06:57:18 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller
logger.go:42: 06:57:19 | es-rollover-autoprov | Deleting namespace: kuttl-test-cuddly-crab
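That ends es-rollover-autoprov: the operator provisioned the one-shot create-mapping job plus the paired es-rollover and es-lookback CronJobs, and the numbered report-span/check-indices jobs verified the indices after each stage. A minimal sketch of the kind of Jaeger CR that enables this behavior, with hypothetical values (the spec actually used by the test lives in its step templates, not shown here):

# Illustrative only; field names follow the Jaeger Operator's
# spec.storage.esRollover section, all values are placeholders.
kubectl apply -n <namespace> -f - <<'EOF'
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: my-jaeger
spec:
  strategy: production
  storage:
    type: elasticsearch
    esRollover:
      schedule: "*/1 * * * *"
      conditions: '{"max_age": "2m"}'
      readTTL: 2m
EOF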
=== CONT kuttl/harness/es-increasing-replicas
logger.go:42: 06:57:27 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:57:27 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:57:27 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:57:27 | es-increasing-replicas | Creating namespace: kuttl-test-viable-buzzard
logger.go:42: 06:57:27 | es-increasing-replicas/1-install | starting test step 1-install
logger.go:42: 06:57:27 | es-increasing-replicas/1-install | Jaeger:kuttl-test-viable-buzzard/simple-prod created
logger.go:42: 06:58:03 | es-increasing-replicas/1-install | test step completed 1-install
logger.go:42: 06:58:03 | es-increasing-replicas/2-install | starting test step 2-install
logger.go:42: 06:58:03 | es-increasing-replicas/2-install | Jaeger:kuttl-test-viable-buzzard/simple-prod updated
logger.go:42: 06:58:08 | es-increasing-replicas/2-install | test step completed 2-install
logger.go:42: 06:58:08 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test
logger.go:42: 06:58:08 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 06:58:09 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 06:58:15 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 06:58:16 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 06:58:16 | es-increasing-replicas/3-smoke-test | job.batch/report-span created
logger.go:42: 06:58:16 | es-increasing-replicas/3-smoke-test | job.batch/check-span created
logger.go:42: 06:58:28 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test
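The smoke test renders a report-span/check-span Job manifest from the gomplate template shown in the command above, applies it, and then waits for both jobs to finish. Expressed imperatively, the completion assertion kuttl performs declaratively is roughly (timeout value illustrative):

# Wait until both smoke-test jobs report Complete.
kubectl wait --for=condition=complete job/report-span job/check-span \
  -n "$NAMESPACE" --timeout=120s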
logger.go:42: 06:58:28 | es-increasing-replicas/4-install | starting test step 4-install
logger.go:42: 06:58:29 | es-increasing-replicas/4-install | Jaeger:kuttl-test-viable-buzzard/simple-prod updated
logger.go:42: 06:58:29 | es-increasing-replicas/4-install | test step completed 4-install
logger.go:42: 06:58:29 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes
logger.go:42: 06:58:29 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE]
logger.go:42: 06:58:29 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 06:58:29 | es-increasing-replicas/5-check-es-nodes | false
logger.go:42: 06:58:29 | es-increasing-replicas/5-check-es-nodes | Error: no matches found
logger.go:42: 06:58:34 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 06:58:34 | es-increasing-replicas/5-check-es-nodes | true
logger.go:42: 06:58:34 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes
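check-es-nodes.sh polls until the cluster reaches the Elasticsearch node count requested in step 4; the first probe prints false (the second node is still coming up, hence "Error: no matches found") and a later probe prints true. A rough, hypothetical equivalent of that retry loop (the real script ships with the test and may differ):

# Hypothetical: count Running ES pods until the expected number appear.
expected=2
until [ "$(kubectl get pods -n "$NAMESPACE" -l component=elasticsearch \
    --field-selector=status.phase=Running -o name | wc -l)" -eq "$expected" ]; do
  sleep 5
done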
logger.go:42: 06:58:34 | es-increasing-replicas | es-increasing-replicas events from ns kuttl-test-viable-buzzard:
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:33 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787c8588 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:33 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:33 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:33 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787c8588 to 1 deployment-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf AddedInterface Add eth0 [10.129.2.21/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:34 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:44 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:57:49 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-1-85787jwwdf.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:00 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-hj54z Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-collector-77fcbdc546-hj54z to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:00 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulCreate Created pod: simple-prod-collector-77fcbdc546-hj54z replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:00 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-77fcbdc546 to 1 deployment-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:00 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-query-9bc77fcfb-6rvqh to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:00 +0000 UTC Warning Pod simple-prod-query-9bc77fcfb-6rvqh FailedMount MountVolume.SetUp failed for volume "simple-prod-ui-oauth-proxy-tls" : secret "simple-prod-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:00 +0000 UTC Normal ReplicaSet.apps simple-prod-query-9bc77fcfb SuccessfulCreate Created pod: simple-prod-query-9bc77fcfb-6rvqh replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:00 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-9bc77fcfb to 1 deployment-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-hj54z AddedInterface Add eth0 [10.128.2.39/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-hj54z.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-hj54z.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-hj54z.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:01 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-sjxl5 Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-collector-77fcbdc546-sjxl5 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-sjxl5 AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-sjxl5.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-sjxl5.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-sjxl5.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulCreate Created pod: simple-prod-collector-77fcbdc546-sjxl5 replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-77fcbdc546 to 2 from 1 deployment-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-query-9bc77fcfb-2pg6c to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal ReplicaSet.apps simple-prod-query-9bc77fcfb SuccessfulCreate Created pod: simple-prod-query-9bc77fcfb-2pg6c replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:05 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-9bc77fcfb to 2 from 1 deployment-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:12 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:12 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:12 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-2pg6c.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:12 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:12 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:12 +0000 UTC Normal Pod simple-prod-query-9bc77fcfb-6rvqh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:12 +0000 UTC Normal ReplicaSet.apps simple-prod-query-9bc77fcfb SuccessfulDelete Deleted pod: simple-prod-query-9bc77fcfb-2pg6c replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:12 +0000 UTC Normal ReplicaSet.apps simple-prod-query-9bc77fcfb SuccessfulDelete Deleted pod: simple-prod-query-9bc77fcfb-6rvqh replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:12 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-9bc77fcfb to 0 from 2 deployment-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-query-7b9f87ff96-5gfth to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-query-7b9f87ff96-5hznf to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b9f87ff96 SuccessfulCreate Created pod: simple-prod-query-7b9f87ff96-5hznf replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b9f87ff96 SuccessfulCreate Created pod: simple-prod-query-7b9f87ff96-5gfth replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:13 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7b9f87ff96 to 2 deployment-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:16 +0000 UTC Normal Pod check-span-m5vm7 Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/check-span-m5vm7 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:16 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-m5vm7 job-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:16 +0000 UTC Normal Pod report-span-ttk7b Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/report-span-ttk7b to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:16 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-ttk7b job-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:17 +0000 UTC Normal Pod check-span-m5vm7 AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:17 +0000 UTC Normal Pod check-span-m5vm7.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:17 +0000 UTC Normal Pod check-span-m5vm7.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:17 +0000 UTC Normal Pod check-span-m5vm7.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:17 +0000 UTC Normal Pod report-span-ttk7b AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:17 +0000 UTC Normal Pod report-span-ttk7b.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:17 +0000 UTC Normal Pod report-span-ttk7b.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:17 +0000 UTC Normal Pod report-span-ttk7b.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8d68bdd SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8d68bdd to 1 deployment-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-hj54z.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-sjxl5.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulDelete Deleted pod: simple-prod-collector-77fcbdc546-hj54z replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulDelete Deleted pod: simple-prod-collector-77fcbdc546-sjxl5 replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-77fcbdc546 to 0 from 2 deployment-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5gfth.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Pod simple-prod-query-7b9f87ff96-5hznf.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b9f87ff96 SuccessfulDelete Deleted pod: simple-prod-query-7b9f87ff96-5gfth replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b9f87ff96 SuccessfulDelete Deleted pod: simple-prod-query-7b9f87ff96-5hznf replicaset-controller
logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:30 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-7b9f87ff96 to 0 from 2
deployment-controller logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestviablebuzzardsimpleprod-2-68b8dcts6g.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-6x8cq Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-collector-5db88495b5-6x8cq to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-6x8cq AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-6x8cq.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-lx4tl Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-collector-5db88495b5-lx4tl to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-lx4tl AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-lx4tl.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-lx4tl.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-lx4tl.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5db88495b5 SuccessfulCreate Created pod: simple-prod-collector-5db88495b5-lx4tl replicaset-controller logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5db88495b5 SuccessfulCreate Created pod: simple-prod-collector-5db88495b5-6x8cq replicaset-controller logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5db88495b5 to 2 deployment-controller logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-query-7597f7fff-vcsdc Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-query-7597f7fff-vcsdc to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 06:58:34 | 
es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-query-7597f7fff-xjd9h Binding Scheduled Successfully assigned kuttl-test-viable-buzzard/simple-prod-query-7597f7fff-xjd9h to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-query-7597f7fff-xjd9h AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-query-7597f7fff-xjd9h.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-query-7597f7fff-xjd9h.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Pod simple-prod-query-7597f7fff-xjd9h.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7597f7fff SuccessfulCreate Created pod: simple-prod-query-7597f7fff-xjd9h replicaset-controller logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7597f7fff SuccessfulCreate Created pod: simple-prod-query-7597f7fff-vcsdc replicaset-controller logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:31 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7597f7fff to 2 deployment-controller logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:32 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-6x8cq.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:32 +0000 UTC Normal Pod simple-prod-collector-5db88495b5-6x8cq.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:32 +0000 UTC Warning Pod simple-prod-collector-5db88495b5-lx4tl.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:32 +0000 UTC Normal Pod simple-prod-query-7597f7fff-vcsdc AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:32 +0000 UTC Normal Pod simple-prod-query-7597f7fff-vcsdc.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:32 +0000 UTC Normal Pod simple-prod-query-7597f7fff-vcsdc.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:32 +0000 UTC Normal Pod simple-prod-query-7597f7fff-vcsdc.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 06:58:34 | es-increasing-replicas | 2023-12-04 06:58:32 +0000 UTC Normal Pod 
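
The es-increasing-replicas events above show the operator reconciling a replica-count change on the simple-prod instance: new collector/query ReplicaSets are scaled up, the old ones down, and a second Elasticsearch node (the ...simpleprod-2 deployment) appears. A minimal sketch of the kind of CR change that drives such a rollout, assuming the usual Jaeger CR fields spec.collector.replicas / spec.query.replicas (names taken from the deployments above):

    # Hypothetical reproduction of what the test step applies: raising replicas
    # on the Jaeger CR makes the operator roll the simple-prod-collector and
    # simple-prod-query deployments, as seen in the events.
    kubectl patch jaeger simple-prod -n kuttl-test-viable-buzzard --type merge \
      -p '{"spec":{"collector":{"replicas":2},"query":{"replicas":2}}}'
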
=== CONT kuttl/harness/es-index-cleaner-autoprov
logger.go:42: 06:59:10 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 06:59:10 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-powerful-teal
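
kuttl assembles each test case from the files whose names match the regexp logged above, ordered by their numeric prefix; anything else in the directory (such as README.md) is skipped. A hypothetical step layout that satisfies ^(\d+)-(?:[^\.]+)(?:\.yaml)?$:

    # 1-install.yaml        step 1: manifests to apply (here, the Jaeger CR)
    # 1-assert.yaml         step 1: state kuttl must observe before moving on
    # 2-report-spans.yaml   step 2: TestStep that runs the report-span job
    # README.md             ignored: no numeric prefix
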
logger.go:42: 06:59:10 | es-index-cleaner-autoprov/1-install | starting test step 1-install
logger.go:42: 06:59:10 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-powerful-teal/test-es-index-cleaner-with-prefix created
logger.go:42: 06:59:46 | es-index-cleaner-autoprov/1-install | test step completed 1-install
logger.go:42: 06:59:46 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 06:59:46 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE test-es-index-cleaner-with-prefix /dev/null]
logger.go:42: 06:59:49 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 06:59:55 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 06:59:56 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 06:59:56 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 07:00:35 | es-index-cleaner-autoprov/2-report-spans | test step completed 2-report-spans
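
The report-span job above is not a static manifest: gomplate renders it from a template, substituting the exported environment (collector endpoint, job number, assert image) before kubectl applies it. A minimal sketch of the same pattern, with illustrative template variables (gomplate exposes the environment as .Env):

    # report-spans.yaml.template would contain references such as
    #   image: {{ .Env.ASSERT_IMG }}
    #   value: {{ .Env.JAEGER_COLLECTOR_ENDPOINT }}
    export JOB_NUMBER=00
    export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268
    gomplate -f report-spans.yaml.template -o report-span-00-job.yaml
    kubectl apply -f report-span-00-job.yaml -n "$NAMESPACE"
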
logger.go:42: 07:00:35 | es-index-cleaner-autoprov/3-install | starting test step 3-install
logger.go:42: 07:00:35 | es-index-cleaner-autoprov/3-install | Jaeger:kuttl-test-powerful-teal/test-es-index-cleaner-with-prefix updated
logger.go:42: 07:00:35 | es-index-cleaner-autoprov/3-install | test step completed 3-install
logger.go:42: 07:00:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | starting test step 4-wait-es-index-cleaner
logger.go:42: 07:00:35 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob test-es-index-cleaner-with-prefix-es-index-cleaner --namespace $NAMESPACE]
logger.go:42: 07:00:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:36Z" level=debug msg="Checking if the test-es-index-cleaner-with-prefix-es-index-cleaner CronJob exists"
logger.go:42: 07:00:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:36Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 07:00:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:36Z" level=warning msg="The BatchV1/Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner was not found"
logger.go:42: 07:00:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:36Z" level=debug msg="Found BatchV/Cronjobs:"
logger.go:42: 07:00:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:36Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 07:00:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:36Z" level=warning msg="The BatchV1/Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner was not found"
logger.go:42: 07:00:36 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:36Z" level=debug msg="Found BatchV/Cronjobs:"
logger.go:42: 07:00:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:46Z" level=debug msg="No BatchV1beta1/Cronjobs were found"
logger.go:42: 07:00:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:46Z" level=info msg="Cronjob test-es-index-cleaner-with-prefix-es-index-cleaner found successfully"
logger.go:42: 07:00:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:46Z" level=debug msg="Waiting for the next scheduled job from test-es-index-cleaner-with-prefix-es-index-cleaner cronjob"
logger.go:42: 07:00:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:46Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 07:00:46 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:46Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 07:00:56 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:00:56Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 07:01:06 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:01:06Z" level=debug msg="Waiting for next job from test-es-index-cleaner-with-prefix-es-index-cleaner to succeed"
logger.go:42: 07:01:16 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | time="2023-12-04T07:01:16Z" level=info msg="Job of owner test-es-index-cleaner-with-prefix-es-index-cleaner succeeded after test-es-index-cleaner-with-prefix-es-index-cleaner 30.071660842s"
logger.go:42: 07:01:16 | es-index-cleaner-autoprov/4-wait-es-index-cleaner | test step completed 4-wait-es-index-cleaner
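
The wait-cronjob helper polls both the legacy batch/v1beta1 and the current batch/v1 APIs (hence the paired "No BatchV1beta1/Cronjobs were found" lines) until the CronJob exists and a Job it spawned succeeds. A rough shell equivalent of that wait, assuming batch/v1 and the usual cronjob-name-as-prefix job naming:

    CRONJOB=test-es-index-cleaner-with-prefix-es-index-cleaner
    # Wait for the CronJob object itself to appear.
    until kubectl get cronjob "$CRONJOB" -n "$NAMESPACE" >/dev/null 2>&1; do sleep 5; done
    # Then wait for any job it owns to report a success.
    until kubectl get jobs -n "$NAMESPACE" \
        -o jsonpath='{range .items[*]}{.metadata.name}={.status.succeeded}{"\n"}{end}' \
        | grep "^${CRONJOB}-" | grep -q '=1$'; do
      sleep 10
    done
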
logger.go:42: 07:01:16 | es-index-cleaner-autoprov/5-install | starting test step 5-install
logger.go:42: 07:01:16 | es-index-cleaner-autoprov/5-install | Jaeger:kuttl-test-powerful-teal/test-es-index-cleaner-with-prefix updated
logger.go:42: 07:01:16 | es-index-cleaner-autoprov/5-install | test step completed 5-install
logger.go:42: 07:01:16 | es-index-cleaner-autoprov/6-check-indices | starting test step 6-check-indices
logger.go:42: 07:01:16 | es-index-cleaner-autoprov/6-check-indices | Job:kuttl-test-powerful-teal/00-check-indices created
logger.go:42: 07:01:20 | es-index-cleaner-autoprov/6-check-indices | test step completed 6-check-indices
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-powerful-teal:
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:16 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-786df9f48b SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z replicaset-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z Binding Scheduled Successfully assigned kuttl-test-powerful-teal/elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:16 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-786df9f48b to 1 deployment-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z AddedInterface Add eth0 [10.129.2.22/23] from ovn-kubernetes
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:17 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:27 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpowerfultealtestesindexclea-1-7xhv2z.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-bc5m8 Binding Scheduled Successfully assigned kuttl-test-powerful-teal/test-es-index-cleaner-with-prefix-collector-7f88446db8-bc5m8 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:43 +0000 UTC Warning Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-bc5m8 FailedMount MountVolume.SetUp failed for volume "test-es-index-cleaner-with-prefix-collector-tls-config-volume" : secret "test-es-index-cleaner-with-prefix-collector-headless-tls" not found kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:43 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-7f88446db8 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-7f88446db8-bc5m8 replicaset-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:43 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-7f88446db8 to 1 deployment-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:43 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4 Binding Scheduled Successfully assigned kuttl-test-powerful-teal/test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:43 +0000 UTC Warning Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4 FailedMount MountVolume.SetUp failed for volume "test-es-index-cleaner-with-prefix-ui-oauth-proxy-tls" : secret "test-es-index-cleaner-with-prefix-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:43 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-55f49695c SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4 replicaset-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:43 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-55f49695c to 1 deployment-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-bc5m8 AddedInterface Add eth0 [10.128.2.46/23] from ovn-kubernetes
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-bc5m8.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-bc5m8.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-bc5m8.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4 AddedInterface Add eth0 [10.128.2.47/23] from ovn-kubernetes
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:44 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:50 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:50 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-55f49695c SuccessfulDelete Deleted pod: test-es-index-cleaner-with-prefix-query-55f49695c-jf4h4 replicaset-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:50 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled down replica set test-es-index-cleaner-with-prefix-query-55f49695c to 0 from 1 deployment-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv Binding Scheduled Successfully assigned kuttl-test-powerful-teal/test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv AddedInterface Add eth0 [10.128.2.48/23] from ovn-kubernetes
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-584865bbd4 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv replicaset-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:51 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-584865bbd4 to 1 deployment-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:52 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:52 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-584865bbd4-bhhcv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:56 +0000 UTC Normal Pod 00-report-span-mwc95 Binding Scheduled Successfully assigned kuttl-test-powerful-teal/00-report-span-mwc95 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:56 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-mwc95 job-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:57 +0000 UTC Normal Pod 00-report-span-mwc95 AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:57 +0000 UTC Normal Pod 00-report-span-mwc95.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:57 +0000 UTC Normal Pod 00-report-span-mwc95.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:57 +0000 UTC Normal Pod 00-report-span-mwc95.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 06:59:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:00:34 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:00:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:00:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-7f88446db8-bc5m8 horizontal-pod-autoscaler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:00:43 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:00 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28361221 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-es-index-cleaner-28361225w9kj job-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-28361225w9kj Binding Scheduled Successfully assigned kuttl-test-powerful-teal/test-es-index-cleaner-with-prefix-es-index-cleaner-28361225w9kj to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-28361225w9kj AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:00 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-28361225w9kj.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:08ca2463363916637592e6c1cc1731784e07860269292b216db3e6fd0eb44382" kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:00 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SuccessfulCreate Created job test-es-index-cleaner-with-prefix-es-index-cleaner-28361221 cronjob-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-28361225w9kj.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-index-cleaner-rhel8@sha256:08ca2463363916637592e6c1cc1731784e07860269292b216db3e6fd0eb44382" in 3.219s (3.219s including waiting) kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-28361225w9kj.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Created Created container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:04 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-es-index-cleaner-28361225w9kj.spec.containers{test-es-index-cleaner-with-prefix-es-index-cleaner} Started Started container test-es-index-cleaner-with-prefix-es-index-cleaner kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:06 +0000 UTC Normal Job.batch test-es-index-cleaner-with-prefix-es-index-cleaner-28361221 Completed Job completed job-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:06 +0000 UTC Normal CronJob.batch test-es-index-cleaner-with-prefix-es-index-cleaner SawCompletedJob Saw completed job: test-es-index-cleaner-with-prefix-es-index-cleaner-28361221, status: Complete cronjob-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:16 +0000 UTC Normal Pod 00-check-indices-p99zr Binding Scheduled Successfully assigned kuttl-test-powerful-teal/00-check-indices-p99zr to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:16 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-p99zr job-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:17 +0000 UTC Normal Pod 00-check-indices-p99zr AddedInterface Add eth0 [10.131.0.49/23] from ovn-kubernetes
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:17 +0000 UTC Normal Pod 00-check-indices-p99zr.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:17 +0000 UTC Normal Pod 00-check-indices-p99zr.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:17 +0000 UTC Normal Pod 00-check-indices-p99zr.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | 2023-12-04 07:01:20 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller
logger.go:42: 07:01:20 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-powerful-teal
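
The 00-check-indices job is the actual assertion of this test: after the cleaner CronJob fires, only indices newer than the retention window may remain. Done by hand, the same check is a single query against the Elasticsearch _cat API (a sketch reusing the suite's ELASTICSEARCH_URL/ELASTICSEARCH_PORT environment; the test's exact prefixed index pattern may differ):

    # Jaeger writes per-day indices (e.g. jaeger-span-YYYY-MM-DD); after the
    # cleaner runs with DAYS=5, anything older than five days should be gone.
    curl -s "${ELASTICSEARCH_URL}${ELASTICSEARCH_PORT}/_cat/indices/*jaeger-span-*?v"
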
=== CONT kuttl/harness/es-from-aio-to-production
logger.go:42: 07:01:27 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:01:27 | es-from-aio-to-production | Creating namespace: kuttl-test-related-seasnail
logger.go:42: 07:01:27 | es-from-aio-to-production/0-install | starting test step 0-install
logger.go:42: 07:01:27 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-related-seasnail/my-jaeger created
logger.go:42: 07:01:36 | es-from-aio-to-production/0-install | test step completed 0-install
logger.go:42: 07:01:36 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:01:36 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:01:37 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
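
get-token.sh prepares the e2e-test service account and hands its bearer token to the smoke-test jobs so they can get past the oauth-proxy sidecar in front of the query service. A rough stand-in for what it produces, assuming kubectl v1.24+ (the script's internals may differ):

    TOKEN=$(kubectl create token e2e-test -n "$NAMESPACE")
    # The token authenticates against the oauth-proxy protecting the query UI/API.
    curl -sk -H "Authorization: Bearer $TOKEN" "https://my-jaeger-query:443/api/services"
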
logger.go:42: 07:01:44 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:01:45 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:01:45 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created
logger.go:42: 07:01:45 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created
logger.go:42: 07:01:56 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:01:56 | es-from-aio-to-production/3-install | starting test step 3-install
logger.go:42: 07:01:56 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-related-seasnail/my-jaeger updated
logger.go:42: 07:02:30 | es-from-aio-to-production/3-install | test step completed 3-install
logger.go:42: 07:02:30 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test
logger.go:42: 07:02:30 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:02:38 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:02:38 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:02:39 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged
logger.go:42: 07:02:39 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged
logger.go:42: 07:02:39 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test
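
The 3-install step is the point of this test: the same my-jaeger CR is switched from the all-in-one strategy to production, which is why the events below first show the all-in-one pod and then, from 07:02:00, a self-provisioned Elasticsearch cluster spinning up. The CR edit behind such a migration looks roughly like this (a sketch; on OpenShift the operator self-provisions Elasticsearch when storage.type is elasticsearch):

    kubectl apply -n "$NAMESPACE" -f - <<'EOF'
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: my-jaeger
    spec:
      strategy: production   # was: allInOne (the default)
      storage:
        type: elasticsearch
    EOF
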
logger.go:42: 07:02:39 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-related-seasnail:
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:31 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck Binding Scheduled Successfully assigned kuttl-test-related-seasnail/my-jaeger-64f6f877c8-94qck to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:31 +0000 UTC Warning Pod my-jaeger-64f6f877c8-94qck FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:31 +0000 UTC Normal ReplicaSet.apps my-jaeger-64f6f877c8 SuccessfulCreate Created pod: my-jaeger-64f6f877c8-94qck replicaset-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:31 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-64f6f877c8 to 1 deployment-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:32 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:32 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:34 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" in 1.939s (1.939s including waiting) kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:34 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:34 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:34 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:34 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:34 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:38 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:38 +0000 UTC Normal Pod my-jaeger-64f6f877c8-94qck.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:38 +0000 UTC Normal ReplicaSet.apps my-jaeger-64f6f877c8 SuccessfulDelete Deleted pod: my-jaeger-64f6f877c8-94qck replicaset-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:38 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-64f6f877c8 to 0 from 1 deployment-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:39 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn Binding Scheduled Successfully assigned kuttl-test-related-seasnail/my-jaeger-68d4994877-8fngn to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:39 +0000 UTC Normal ReplicaSet.apps my-jaeger-68d4994877 SuccessfulCreate Created pod: my-jaeger-68d4994877-8fngn replicaset-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:39 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-68d4994877 to 1 deployment-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:40 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:40 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:40 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:40 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:40 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:40 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:40 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:45 +0000 UTC Normal Pod check-span-v4ls8 Binding Scheduled Successfully assigned kuttl-test-related-seasnail/check-span-v4ls8 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:45 +0000 UTC Normal Pod check-span-v4ls8 AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:45 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-v4ls8 job-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:45 +0000 UTC Normal Pod report-span-8n5h5 Binding Scheduled Successfully assigned kuttl-test-related-seasnail/report-span-8n5h5 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:45 +0000 UTC Normal Pod report-span-8n5h5 AddedInterface Add eth0 [10.129.2.23/23] from ovn-kubernetes
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:45 +0000 UTC Normal Pod report-span-8n5h5.spec.containers{report-span} Pulling Pulling image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:45 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-8n5h5 job-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:46 +0000 UTC Normal Pod check-span-v4ls8.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:46 +0000 UTC Normal Pod check-span-v4ls8.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:46 +0000 UTC Normal Pod check-span-v4ls8.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:48 +0000 UTC Normal Pod report-span-8n5h5.spec.containers{report-span} Pulled Successfully pulled image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" in 2.49s (2.49s including waiting) kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:48 +0000 UTC Normal Pod report-span-8n5h5.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:48 +0000 UTC Normal Pod report-span-8n5h5.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:01:56 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4 Binding Scheduled Successfully assigned kuttl-test-related-seasnail/elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:00 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b74fd6d4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4 replicaset-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:00 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b74fd6d4 to 1 deployment-controller
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4 AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4.spec.containers{proxy}
Created Created container proxy kubelet logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:11 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:16 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestrelatedseasnailmyjaeger-1-74b744xzv4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:21 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:27 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:27 +0000 UTC Normal Pod my-jaeger-68d4994877-8fngn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:27 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-fwhns Binding Scheduled Successfully assigned kuttl-test-related-seasnail/my-jaeger-collector-5489f5bd9b-fwhns to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:27 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-fwhns replicaset-controller logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:27 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:27 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878 Binding Scheduled Successfully assigned kuttl-test-related-seasnail/my-jaeger-query-68f9cdbb57-ss878 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:27 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-68f9cdbb57 SuccessfulCreate Created pod: my-jaeger-query-68f9cdbb57-ss878 replicaset-controller logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:27 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-68f9cdbb57 to 1 deployment-controller logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-fwhns AddedInterface Add eth0 [10.131.0.51/23] from ovn-kubernetes logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-fwhns.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-fwhns.spec.containers{jaeger-collector} Created Created container 
jaeger-collector kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-fwhns.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878 AddedInterface Add eth0 [10.131.0.52/23] from ovn-kubernetes
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | 2023-12-04 07:02:28 +0000 UTC Normal Pod my-jaeger-query-68f9cdbb57-ss878.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:02:39 | es-from-aio-to-production | Deleting namespace: kuttl-test-related-seasnail
=== CONT kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (864.48s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.72s)
        --- PASS: kuttl/harness/es-multiinstance (121.60s)
        --- PASS: kuttl/harness/es-streaming-autoprovisioned (205.69s)
        --- PASS: kuttl/harness/es-simple-prod (6.15s)
        --- PASS: kuttl/harness/es-rollover-autoprov (204.93s)
        --- PASS: kuttl/harness/es-increasing-replicas (103.09s)
        --- PASS: kuttl/harness/es-index-cleaner-autoprov (137.19s)
        --- PASS: kuttl/harness/es-from-aio-to-production (78.94s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml
time="2023-12-04T07:02:46Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-04T07:02:46Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-04T07:02:46Z" level=debug msg="normalizing test case names"
time="2023-12-04T07:02:46Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts"
time="2023-12-04T07:02:46Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance"
time="2023-12-04T07:02:46Z" level=debug msg="elasticsearch/es-streaming-autoprovisioned -> elasticsearch_es_streaming_autoprovisioned"
time="2023-12-04T07:02:46Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod"
time="2023-12-04T07:02:46Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov"
time="2023-12-04T07:02:46Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas"
time="2023-12-04T07:02:46Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov"
time="2023-12-04T07:02:46Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+--------------------------------------------+--------+
|                    NAME                    | RESULT |
+--------------------------------------------+--------+
| elasticsearch_artifacts                    | passed |
| elasticsearch_es_multiinstance             | passed |
| elasticsearch_es_streaming_autoprovisioned | passed |
| elasticsearch_es_simple_prod               | passed |
| elasticsearch_es_rollover_autoprov         | passed |
| elasticsearch_es_increasing_replicas       | passed |
| elasticsearch_es_index_cleaner_autoprov    | passed |
| elasticsearch_es_from_aio_to_production    | passed |
+--------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=examples
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/examples.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-examples
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
KAFKA_VERSION=0.32.0 \
SKIP_KAFKA=false \
VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \
./tests/e2e/examples/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-12-02-123536 True False 24m Cluster version is 4.15.0-0.nightly-2023-12-02-123536'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 24m Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 0.32.0 ']' ++ version_le 0.32.0 0.25.0 +++ echo 0.32.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 0.32.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-as-daemonset + cd examples-agent-as-daemonset + example_name=agent-as-daemonset + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-as-daemonset 01 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-as-daemonset.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-as-daemonset 02 + '[' 2 -ne 2 ']' + example_name=agent-as-daemonset + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-as-daemonset.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-as-daemonset.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset + '[' examples-agent-as-daemonset '!=' _build ']' + cd .. + mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + render_install_example agent-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. 
+ mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + '[' false = true ']' + start_test examples-auto-provision-kafka + '[' 1 -ne 1 ']' + test_name=examples-auto-provision-kafka + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-auto-provision-kafka' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-auto-provision-kafka\e[0m' Rendering files for test examples-auto-provision-kafka + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. + mkdir -p examples-auto-provision-kafka + cd examples-auto-provision-kafka + example_name=auto-provision-kafka + render_install_kafka_operator 01 + '[' 1 -ne 1 ']' + test_step=01 + '[' true '!=' true ']' + render_install_example auto-provision-kafka 02 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/auto-provision-kafka.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + JAEGER_NAME=auto-provision-kafka + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=streaming ++ '[' streaming = production ']' ++ '[' streaming = streaming ']' ++ echo streaming ++ return 0 + jaeger_strategy=streaming + '[' streaming = DaemonSet ']' + '[' streaming = allInOne ']' + '[' streaming = production ']' + '[' streaming = streaming ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./02-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./02-install.yaml + mv ./02-assert.yaml ./05-assert.yaml + render_assert_kafka true auto-provision-kafka 02 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provision-kafka + test_step=02 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./02-assert.yaml ++ expr 02 + 1 + CLUSTER_NAME=auto-provision-kafka + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./3-assert.yaml ++ expr 02 + 2 + CLUSTER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./04-assert.yaml + render_smoke_test_example auto-provision-kafka 06 + '[' 2 -ne 2 ']' + example_name=auto-provision-kafka + test_step=06 + deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/auto-provision-kafka.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/auto-provision-kafka.yaml ++ jaeger_name=auto-provision-kafka ++ '[' -z auto-provision-kafka ']' ++ echo auto-provision-kafka ++ return 0 + jaeger_name=auto-provision-kafka + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test auto-provision-kafka true 06 + '[' 3 -ne 3 ']' + jaeger=auto-provision-kafka + is_secured=true + test_step=06 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 + export JAEGER_NAME=auto-provision-kafka + JAEGER_NAME=auto-provision-kafka + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./06-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-auto-provision-kafka + '[' examples-auto-provision-kafka '!=' _build ']' + cd .. 
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
+ mkdir -p examples-simple-prod-with-volumes
+ cd examples-simple-prod-with-volumes
+ example_name=simple-prod-with-volumes
+ render_install_example simple-prod-with-volumes 01
+ '[' 2 -ne 2 ']'
+ example_name=simple-prod-with-volumes
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=simple-prod
++ '[' -z simple-prod ']'
++ echo simple-prod
++ return 0
+ JAEGER_NAME=simple-prod
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=production
++ '[' production = production ']'
++ echo production
++ return 0
+ jaeger_strategy=production
+ '[' production = DaemonSet ']'
+ '[' production = allInOne ']'
+ '[' production = production ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ [[ true = true ]]
+ [[ true = true ]]
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml
+ render_smoke_test_example simple-prod-with-volumes 02
+ '[' 2 -ne 2 ']'
+ example_name=simple-prod-with-volumes
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml
++ jaeger_name=simple-prod
++ '[' -z simple-prod ']'
++ echo simple-prod
++ return 0
+ jaeger_name=simple-prod
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test simple-prod true 02
+ '[' 3 -ne 3 ']'
+ jaeger=simple-prod
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml
+ start_test examples-simplest
+ '[' 1 -ne 1 ']'
+ test_name=examples-simplest
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-simplest'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-simplest\e[0m'
Rendering files for test examples-simplest
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes
+ '[' examples-simple-prod-with-volumes '!=' _build ']'
+ cd ..
+ mkdir -p examples-simplest
+ cd examples-simplest
+ example_name=simplest
+ render_install_example simplest 00
+ '[' 2 -ne 2 ']'
+ example_name=simplest
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=simplest
++ '[' -z simplest ']'
++ echo simplest
++ return 0
+ JAEGER_NAME=simplest
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example simplest 01
+ '[' 2 -ne 2 ']'
+ example_name=simplest
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml
++ jaeger_name=simplest
++ '[' -z simplest ']'
++ echo simplest
++ return 0
+ jaeger_name=simplest
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test simplest true 01
+ '[' 3 -ne 3 ']'
+ jaeger=simplest
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simplest-query:443
+ JAEGER_QUERY_ENDPOINT=https://simplest-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268
+ export JAEGER_NAME=simplest
+ JAEGER_NAME=simplest
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-with-badger
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-badger
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-badger'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m'
Rendering files for test examples-with-badger
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest
+ '[' examples-simplest '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-badger
+ cd examples-with-badger
+ example_name=with-badger
+ render_install_example with-badger 00
+ '[' 2 -ne 2 ']'
+ example_name=with-badger
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=with-badger
++ '[' -z with-badger ']'
++ echo with-badger
++ return 0
+ JAEGER_NAME=with-badger
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example with-badger 01
+ '[' 2 -ne 2 ']'
+ example_name=with-badger
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml
++ jaeger_name=with-badger
++ '[' -z with-badger ']'
++ echo with-badger
++ return 0
+ jaeger_name=with-badger
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-badger true 01
+ '[' 3 -ne 3 ']'
+ jaeger=with-badger
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-badger-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268
+ export JAEGER_NAME=with-badger
+ JAEGER_NAME=with-badger
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-with-badger-and-volume
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-badger-and-volume
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-badger-and-volume'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m'
Rendering files for test examples-with-badger-and-volume
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger
+ '[' examples-with-badger '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-badger-and-volume
+ cd examples-with-badger-and-volume
+ example_name=with-badger-and-volume
+ render_install_example with-badger-and-volume 00
+ '[' 2 -ne 2 ']'
+ example_name=with-badger-and-volume
+ test_step=00
+ install_file=./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml
++ jaeger_name=with-badger-and-volume
++ '[' -z with-badger-and-volume ']'
++ echo with-badger-and-volume
++ return 0
+ JAEGER_NAME=with-badger-and-volume
+ local jaeger_strategy
++ get_jaeger_strategy ./00-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./00-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = production ']'
++ '[' null = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_smoke_test_example with-badger-and-volume 01
+ '[' 2 -ne 2 ']'
+ example_name=with-badger-and-volume
+ test_step=01
+ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml
++ jaeger_name=with-badger-and-volume
++ '[' -z with-badger-and-volume ']'
++ echo with-badger-and-volume
++ return 0
+ jaeger_name=with-badger-and-volume
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-badger-and-volume true 01
+ '[' 3 -ne 3 ']'
+ jaeger=with-badger-and-volume
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268
+ export JAEGER_NAME=with-badger-and-volume
+ JAEGER_NAME=with-badger-and-volume
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-with-cassandra
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-cassandra
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-cassandra'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m'
Rendering files for test examples-with-cassandra
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume
+ '[' examples-with-badger-and-volume '!=' _build ']'
+ cd ..
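
Every example above goes through the same render pipeline: gomplate expands the example manifest, sed rewrites the storage hostnames for the test namespace, yq extracts the Jaeger name and strategy, and a smoke test is rendered against the derived endpoints. A condensed sketch of that smoke-test helper, reconstructed from the trace (the unsecured-branch defaults and the helper body are assumptions, not a copy of the real script):

    render_smoke_test() {
        local jaeger=$1 is_secured=$2 test_step=$3
        # Assumed defaults for a non-OpenShift cluster; this run only
        # exercises the secured branch.
        local protocol="http://" query_port=":16686"
        local template="$ROOT/tests/templates/smoke-test.yaml.template"
        if [ "$is_secured" = true ]; then
            # Behind the OpenShift oauth-proxy the query service is on 443.
            protocol="https://" query_port=":443"
            template="$ROOT/tests/templates/openshift/smoke-test.yaml.template"
        fi
        export JAEGER_NAME="$jaeger"
        export JAEGER_QUERY_ENDPOINT="${protocol}${jaeger}-query${query_port}"
        export JAEGER_COLLECTOR_ENDPOINT="http://${jaeger}-collector-headless:14268"
        gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
        gomplate -f "$ROOT/tests/templates/smoke-test-assert.yaml.template" -o "./${test_step}-assert.yaml"
        unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }
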
+ mkdir -p examples-with-cassandra
+ cd examples-with-cassandra
+ example_name=with-cassandra
+ render_install_cassandra 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml
+ render_install_example with-cassandra 01
+ '[' 2 -ne 2 ']'
+ example_name=with-cassandra
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=with-cassandra
++ '[' -z with-cassandra ']'
++ echo with-cassandra
++ return 0
+ JAEGER_NAME=with-cassandra
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=allInOne
++ '[' allInOne = production ']'
++ '[' allInOne = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_smoke_test_example with-cassandra 02
+ '[' 2 -ne 2 ']'
+ example_name=with-cassandra
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml
++ jaeger_name=with-cassandra
++ '[' -z with-cassandra ']'
++ echo with-cassandra
++ return 0
+ jaeger_name=with-cassandra
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-cassandra true 02
+ '[' 3 -ne 3 ']'
+ jaeger=with-cassandra
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268
+ export JAEGER_NAME=with-cassandra
+ JAEGER_NAME=with-cassandra
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ start_test examples-with-sampling
+ '[' 1 -ne 1 ']'
+ test_name=examples-with-sampling
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-with-sampling'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m'
Rendering files for test examples-with-sampling
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra
+ '[' examples-with-cassandra '!=' _build ']'
+ cd ..
+ mkdir -p examples-with-sampling
+ cd examples-with-sampling
+ export example_name=with-sampling
+ example_name=with-sampling
+ render_install_cassandra 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml
+ render_install_example with-sampling 01
+ '[' 2 -ne 2 ']'
+ example_name=with-sampling
+ test_step=01
+ install_file=./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml
+ sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml
+ sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml
+ export JAEGER_NAME
++ get_jaeger_name ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml
++ jaeger_name=with-sampling
++ '[' -z with-sampling ']'
++ echo with-sampling
++ return 0
+ JAEGER_NAME=with-sampling
+ local jaeger_strategy
++ get_jaeger_strategy ./01-install.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=./01-install.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml
++ strategy=allInOne
++ '[' allInOne = production ']'
++ '[' allInOne = streaming ']'
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml
++ strategy=null
++ '[' null = null ']'
++ echo allInOne
++ return 0
+ jaeger_strategy=allInOne
+ '[' allInOne = DaemonSet ']'
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_smoke_test_example with-sampling 02
+ '[' 2 -ne 2 ']'
+ example_name=with-sampling
+ test_step=02
+ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml
++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml
++ '[' 1 -ne 1 ']'
++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml
+++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml
++ jaeger_name=with-sampling
++ '[' -z with-sampling ']'
++ echo with-sampling
++ return 0
+ jaeger_name=with-sampling
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ render_smoke_test with-sampling true 02
+ '[' 3 -ne 3 ']'
+ jaeger=with-sampling
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443
+ JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268
+ export JAEGER_NAME=with-sampling
+ JAEGER_NAME=with-sampling
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ start_test examples-openshift-agent-as-daemonset
+ '[' 1 -ne 1 ']'
+ test_name=examples-openshift-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-openshift-agent-as-daemonset'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-openshift-agent-as-daemonset\e[0m'
Rendering files for test examples-openshift-agent-as-daemonset
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling
+ '[' examples-with-sampling '!=' _build ']'
+ cd ..
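
The strategy probing that repeats in these traces first reads .spec.strategy and, unless it is production or streaming, falls back to .spec.agent.strategy, treating yq's literal "null" output as "field absent". A minimal reconstruction inferred from the trace (the real helper may differ in detail):

    get_jaeger_strategy() {
        local deployment_file=$1 strategy
        strategy=$(yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
        if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
            echo "$strategy"
            return 0
        fi
        # yq prints "null" for a missing field; fall back to the agent
        # strategy, defaulting to allInOne when that is unset too.
        strategy=$(yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
        if [ "$strategy" = null ]; then
            echo allInOne
        else
            echo "$strategy"
        fi
        return 0
    }
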
+ mkdir -p examples-openshift-agent-as-daemonset
+ cd examples-openshift-agent-as-daemonset
+ prepare_daemonset 00
+ '[' 1 -ne 1 ']'
+ test_step=00
+ '[' true = true ']'
+ cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml
+ echo ---
+ cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml
+ JAEGER_NAME=agent-as-daemonset
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml
+ render_install_vertx 03
+ '[' 1 -ne 1 ']'
+ test_step=03
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./03-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].env=[{"name": "JAEGER_AGENT_HOST", "valueFrom": {"fieldRef": {"apiVersion": "v1", "fieldPath": "status.hostIP"}}}]' ./03-install.yaml
+ render_find_service agent-as-daemonset production order 00 04
+ '[' 5 -ne 5 ']'
+ jaeger=agent-as-daemonset
+ deployment_strategy=production
+ service_name=order
+ job_number=00
+ test_step=04
+ export JAEGER_NAME=agent-as-daemonset
+ JAEGER_NAME=agent-as-daemonset
+ export JOB_NUMBER=00
+ JOB_NUMBER=00
+ export SERVICE_NAME=order
+ SERVICE_NAME=order
+ export JAEGER_QUERY_ENDPOINT
+ '[' true = true ']'
+ '[' production '!=' allInOne ']'
+ protocol=https://
+ query_port=
+ template=/tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template
+ JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/find-service.yaml.template -o ./04-find-service.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./04-assert.yaml
+ unset JAEGER_NAME
+ unset SERVICE_NAME
+ unset JOB_NUMBER
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' true = true ']'
+ start_test examples-openshift-with-htpasswd
+ '[' 1 -ne 1 ']'
+ test_name=examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test examples-openshift-with-htpasswd'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m'
Rendering files for test examples-openshift-with-htpasswd
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-agent-as-daemonset
+ '[' examples-openshift-agent-as-daemonset '!=' _build ']'
+ cd ..
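
The yq edit in the Vert.x step wires the example app to the node-local agent: the downward API resolves status.hostIP when the pod starts, which is the standard way to address a hostPort DaemonSet such as the agent deployed here. One way to confirm the resolution once the app pod is running (the deployment name below is a guess for illustration; it does not appear in this log):

    # Prints the hosting node's IP, i.e. the same value as the pod's status.hostIP
    kubectl exec deploy/vertx-create-span-sidecar -n "$NAMESPACE" -- printenv JAEGER_AGENT_HOST
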
+ mkdir -p examples-openshift-with-htpasswd
+ cd examples-openshift-with-htpasswd
+ export JAEGER_NAME=with-htpasswd
+ JAEGER_NAME=with-htpasswd
+ export JAEGER_USERNAME=awesomeuser
+ JAEGER_USERNAME=awesomeuser
+ export JAEGER_PASSWORD=awesomepassword
+ JAEGER_PASSWORD=awesomepassword
+ export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
+ JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw='
++ base64
+ SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg==
+ /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml
+ export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ INSECURE=true
+ JAEGER_USERNAME=
+ JAEGER_PASSWORD=
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml
+ JAEGER_USERNAME=wronguser
+ JAEGER_PASSWORD=wrongpassword
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running examples E2E tests'
Running examples E2E tests
+ cd tests/e2e/examples/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3107041029
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 600 seconds for each step
harness.go:372: testsuite: . has 17 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/examples-agent-as-daemonset
=== PAUSE kuttl/harness/examples-agent-as-daemonset
=== RUN kuttl/harness/examples-agent-with-priority-class
=== PAUSE kuttl/harness/examples-agent-with-priority-class
=== RUN kuttl/harness/examples-all-in-one-with-options
=== PAUSE kuttl/harness/examples-all-in-one-with-options
=== RUN kuttl/harness/examples-auto-provision-kafka
=== PAUSE kuttl/harness/examples-auto-provision-kafka
=== RUN kuttl/harness/examples-business-application-injected-sidecar
=== PAUSE kuttl/harness/examples-business-application-injected-sidecar
=== RUN kuttl/harness/examples-collector-with-priority-class
=== PAUSE kuttl/harness/examples-collector-with-priority-class
=== RUN kuttl/harness/examples-openshift-agent-as-daemonset
=== PAUSE kuttl/harness/examples-openshift-agent-as-daemonset
=== RUN kuttl/harness/examples-openshift-with-htpasswd
=== PAUSE kuttl/harness/examples-openshift-with-htpasswd
=== RUN kuttl/harness/examples-service-types
=== PAUSE kuttl/harness/examples-service-types
=== RUN kuttl/harness/examples-simple-prod
=== PAUSE kuttl/harness/examples-simple-prod
=== RUN kuttl/harness/examples-simple-prod-with-volumes
=== PAUSE kuttl/harness/examples-simple-prod-with-volumes
=== RUN kuttl/harness/examples-simplest
=== PAUSE kuttl/harness/examples-simplest
=== RUN kuttl/harness/examples-with-badger
=== PAUSE kuttl/harness/examples-with-badger
=== RUN kuttl/harness/examples-with-badger-and-volume
=== PAUSE kuttl/harness/examples-with-badger-and-volume
=== RUN kuttl/harness/examples-with-cassandra
=== PAUSE kuttl/harness/examples-with-cassandra
=== RUN kuttl/harness/examples-with-sampling
=== PAUSE kuttl/harness/examples-with-sampling
=== CONT kuttl/harness/artifacts
logger.go:42: 07:03:25 | artifacts | Creating namespace: kuttl-test-one-piranha
logger.go:42: 07:03:25 | artifacts | artifacts events from ns kuttl-test-one-piranha:
logger.go:42: 07:03:25 | artifacts | Deleting namespace: kuttl-test-one-piranha
=== CONT kuttl/harness/examples-service-types
logger.go:42: 07:03:31 | examples-service-types | Creating namespace: kuttl-test-living-duckling
logger.go:42: 07:03:31 | examples-service-types/0-install | starting test step 0-install
logger.go:42: 07:03:31 | examples-service-types/0-install | Jaeger:kuttl-test-living-duckling/service-types created
logger.go:42: 07:03:37 | examples-service-types/0-install | test step completed 0-install
logger.go:42: 07:03:37 | examples-service-types/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:03:37 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null]
logger.go:42: 07:03:38 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
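
The with-htpasswd test hard-codes its credential hash; the trace above only base64-encodes it into SECRET. For reference, the same pair of values can be reproduced with stock tooling (a sketch; the render script itself never invokes htpasswd):

    # Emits 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' (htpasswd from httpd-tools/apache2-utils)
    htpasswd -nbs awesomeuser awesomepassword
    # SECRET is that entry base64-encoded, exactly as the trace shows
    echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' | base64
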
logger.go:42: 07:03:45 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:03:45 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:03:46 | examples-service-types/1-smoke-test | job.batch/report-span created
logger.go:42: 07:03:46 | examples-service-types/1-smoke-test | job.batch/check-span created
logger.go:42: 07:03:57 | examples-service-types/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:03:57 | examples-service-types/2- | starting test step 2-
logger.go:42: 07:03:57 | examples-service-types/2- | test step completed 2-
logger.go:42: 07:03:57 | examples-service-types | examples-service-types events from ns kuttl-test-living-duckling:
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:34 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv Binding Scheduled Successfully assigned kuttl-test-living-duckling/service-types-7d767bd94f-kbbfv to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:34 +0000 UTC Normal ReplicaSet.apps service-types-7d767bd94f SuccessfulCreate Created pod: service-types-7d767bd94f-kbbfv replicaset-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:34 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:34 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-7d767bd94f to 1 deployment-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:35 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:35 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:35 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:35 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:35 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:35 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:35 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:37 +0000 UTC Warning Pod service-types-7d767bd94f-kbbfv FailedMount MountVolume.SetUp failed for volume "service-types-service-ca" : configmap references non-existent config key: service-ca.crt kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:37 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:37 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:40 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:42 +0000 UTC Normal Pod service-types-78665648f7-4vm5k Binding Scheduled Successfully assigned kuttl-test-living-duckling/service-types-78665648f7-4vm5k to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:42 +0000 UTC Normal ReplicaSet.apps service-types-78665648f7 SuccessfulCreate Created pod: service-types-78665648f7-4vm5k replicaset-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:42 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:42 +0000 UTC Normal Pod service-types-7d767bd94f-kbbfv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:42 +0000 UTC Normal ReplicaSet.apps service-types-7d767bd94f SuccessfulDelete Deleted pod: service-types-7d767bd94f-kbbfv replicaset-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:42 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-7d767bd94f to 0 from 1 deployment-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:42 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-78665648f7 to 1 deployment-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:43 +0000 UTC Normal Pod service-types-78665648f7-4vm5k AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:43 +0000 UTC Normal Pod service-types-78665648f7-4vm5k.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:43 +0000 UTC Normal Pod service-types-78665648f7-4vm5k.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:43 +0000 UTC Normal Pod service-types-78665648f7-4vm5k.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:43 +0000 UTC Normal Pod service-types-78665648f7-4vm5k.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:43 +0000 UTC Normal Pod service-types-78665648f7-4vm5k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:43 +0000 UTC Normal Pod service-types-78665648f7-4vm5k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod check-span-2nqmd Binding Scheduled Successfully assigned kuttl-test-living-duckling/check-span-2nqmd to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod check-span-2nqmd AddedInterface Add eth0 [10.131.0.53/23] from ovn-kubernetes
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod check-span-2nqmd.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod check-span-2nqmd.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod check-span-2nqmd.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-2nqmd job-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod report-span-2wfs2 Binding Scheduled Successfully assigned kuttl-test-living-duckling/report-span-2wfs2 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod report-span-2wfs2 AddedInterface Add eth0 [10.129.2.25/23] from ovn-kubernetes
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod report-span-2wfs2.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod report-span-2wfs2.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Pod report-span-2wfs2.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:46 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2wfs2 job-controller
logger.go:42: 07:03:57 | examples-service-types | 2023-12-04 07:03:56 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:03:57 | examples-service-types | Deleting namespace: kuttl-test-living-duckling
=== CONT kuttl/harness/examples-with-sampling
logger.go:42: 07:04:24 | examples-with-sampling | Creating namespace: kuttl-test-square-bull
logger.go:42: 07:04:24 | examples-with-sampling/0-install | starting test step 0-install
logger.go:42: 07:04:24 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE]
logger.go:42: 07:04:24 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 07:04:24 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-square-bull
logger.go:42: 07:04:24 | examples-with-sampling/0-install | kubectl create namespace kuttl-test-square-bull 2>&1 | grep -v "already exists" || true
logger.go:42: 07:04:25 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-square-bull 2>&1 | grep -v "already exists" || true
logger.go:42: 07:04:25 | examples-with-sampling/0-install | service/cassandra created
logger.go:42: 07:04:25 | examples-with-sampling/0-install | statefulset.apps/cassandra created
logger.go:42: 07:04:25 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 07:04:37 | examples-with-sampling/0-install | test step completed 0-install
logger.go:42: 07:04:37 | examples-with-sampling/1-install | starting test step 1-install
logger.go:42: 07:04:37 | examples-with-sampling/1-install | Jaeger:kuttl-test-square-bull/with-sampling created
logger.go:42: 07:04:43 | examples-with-sampling/1-install | test step completed 1-install
logger.go:42: 07:04:43 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:04:43 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null]
logger.go:42: 07:04:45 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
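
Each smoke-test step has the shape visible in the service-types run above: gomplate renders a pair of batch Jobs, report-span pushes a trace to the collector endpoint and check-span polls the query endpoint until that trace is returned; the step passes when both Jobs complete. Reduced to plain kubectl it is roughly the following (the explicit wait is an assumption here; kuttl normally does the waiting through its assert files):

    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
    kubectl wait --for=condition=complete -n "$NAMESPACE" job/report-span job/check-span --timeout=300s
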
logger.go:42: 07:04:51 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:04:52 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:04:52 | examples-with-sampling/2-smoke-test | job.batch/report-span created
logger.go:42: 07:04:52 | examples-with-sampling/2-smoke-test | job.batch/check-span created
logger.go:42: 07:05:04 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 07:05:04 | examples-with-sampling/3- | starting test step 3-
logger.go:42: 07:05:04 | examples-with-sampling/3- | test step completed 3-
logger.go:42: 07:05:04 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-square-bull:
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:25 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-square-bull/cassandra-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:25 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:27 +0000 UTC Warning Pod cassandra-0 FailedToRetrieveImagePullSecret Unable to retrieve some image pull secrets (default-dockercfg-lzrjp); attempting to pull the image may not succeed. kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:27 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:27 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:31 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 3.911s (3.911s including waiting) kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:31 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:31 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:31 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-square-bull/cassandra-1 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:31 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.54/23] from ovn-kubernetes
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:31 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:31 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:36 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 4.73s (4.73s including waiting) kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:36 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:36 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:41 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2 Binding Scheduled Successfully assigned kuttl-test-square-bull/with-sampling-798d8bbd67-2mbm2 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:41 +0000 UTC Warning Pod with-sampling-798d8bbd67-2mbm2 FailedMount MountVolume.SetUp failed for volume "with-sampling-collector-tls-config-volume" : secret "with-sampling-collector-headless-tls" not found kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:41 +0000 UTC Normal ReplicaSet.apps with-sampling-798d8bbd67 SuccessfulCreate Created pod: with-sampling-798d8bbd67-2mbm2 replicaset-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:41 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-798d8bbd67 to 1 deployment-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:42 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2 AddedInterface Add eth0 [10.128.2.55/23] from ovn-kubernetes
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:42 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:42 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:42 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:42 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:42 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:42 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:48 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:48 +0000 UTC Normal Pod with-sampling-798d8bbd67-2mbm2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:48 +0000 UTC Normal ReplicaSet.apps with-sampling-798d8bbd67 SuccessfulDelete Deleted pod: with-sampling-798d8bbd67-2mbm2 replicaset-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:48 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set with-sampling-798d8bbd67 to 0 from 1 deployment-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:49 +0000 UTC Normal Pod with-sampling-6c8c8fb9c5-h5svc Binding Scheduled Successfully assigned kuttl-test-square-bull/with-sampling-6c8c8fb9c5-h5svc to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:49 +0000 UTC Normal Pod with-sampling-6c8c8fb9c5-h5svc AddedInterface Add eth0 [10.128.2.56/23] from ovn-kubernetes
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:49 +0000 UTC Normal Pod with-sampling-6c8c8fb9c5-h5svc.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:49 +0000 UTC Normal Pod with-sampling-6c8c8fb9c5-h5svc.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:49 +0000 UTC Normal Pod with-sampling-6c8c8fb9c5-h5svc.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:49 +0000 UTC Normal Pod with-sampling-6c8c8fb9c5-h5svc.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:49 +0000 UTC Normal ReplicaSet.apps with-sampling-6c8c8fb9c5 SuccessfulCreate Created pod: with-sampling-6c8c8fb9c5-h5svc replicaset-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:49 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-6c8c8fb9c5 to 1 deployment-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:50 +0000 UTC Normal Pod with-sampling-6c8c8fb9c5-h5svc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:50 +0000 UTC Normal Pod with-sampling-6c8c8fb9c5-h5svc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:52 +0000 UTC Normal Pod check-span-mz2w9 Binding Scheduled Successfully assigned kuttl-test-square-bull/check-span-mz2w9 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:52 +0000 UTC Normal Pod check-span-mz2w9 AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:52 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-mz2w9 job-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:52 +0000 UTC Normal Pod report-span-xnp2w Binding Scheduled Successfully assigned kuttl-test-square-bull/report-span-xnp2w to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:52 +0000 UTC Normal Pod report-span-xnp2w AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:52 +0000 UTC Normal Pod report-span-xnp2w.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:52 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-xnp2w job-controller
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:53 +0000 UTC Normal Pod check-span-mz2w9.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:53 +0000 UTC Normal Pod check-span-mz2w9.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:53 +0000 UTC Normal Pod check-span-mz2w9.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:53 +0000 UTC Normal Pod report-span-xnp2w.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:04:53 +0000 UTC Normal Pod report-span-xnp2w.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:05:04 | examples-with-sampling | 2023-12-04 07:05:03 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:05:04 | examples-with-sampling | Deleting namespace: kuttl-test-square-bull
=== CONT kuttl/harness/examples-with-cassandra
logger.go:42: 07:05:23 | examples-with-cassandra | Creating namespace: kuttl-test-lucky-filly
logger.go:42: 07:05:23 | examples-with-cassandra/0-install | starting test step 0-install
logger.go:42: 07:05:23 | examples-with-cassandra/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE]
logger.go:42: 07:05:23 | examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 07:05:23 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-lucky-filly
logger.go:42: 07:05:23 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-lucky-filly 2>&1 | grep -v "already exists" || true
logger.go:42: 07:05:23 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-lucky-filly 2>&1 | grep -v "already exists" || true
logger.go:42: 07:05:24 | examples-with-cassandra/0-install | service/cassandra created
logger.go:42: 07:05:24 | examples-with-cassandra/0-install | statefulset.apps/cassandra created
logger.go:42: 07:05:24 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 07:05:26 | examples-with-cassandra/0-install | test step completed 0-install
logger.go:42: 07:05:26 | examples-with-cassandra/1-install | starting test step 1-install
logger.go:42: 07:05:26 | examples-with-cassandra/1-install | Jaeger:kuttl-test-lucky-filly/with-cassandra created
logger.go:42: 07:05:45 | examples-with-cassandra/1-install | test step completed 1-install
logger.go:42: 07:05:45 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:05:45 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null]
logger.go:42: 07:05:47 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
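
The 0-install step for these Cassandra-backed examples shells out to the repository's make cassandra target, which per the commands it echoes above is an idempotent two-liner:

    kubectl create namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true
    kubectl create -f ./tests/cassandra.yml --namespace "$STORAGE_NAMESPACE" 2>&1 | grep -v "already exists" || true
    # tests/cassandra.yml supplies the 'cassandra' Service and StatefulSet whose
    # pods (cassandra-0, cassandra-1) appear in the events above and below.
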
logger.go:42: 07:06:06 | examples-with-cassandra/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:06:07 | examples-with-cassandra/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:06:07 | examples-with-cassandra/2-smoke-test | job.batch/report-span created logger.go:42: 07:06:07 | examples-with-cassandra/2-smoke-test | job.batch/check-span created logger.go:42: 07:06:19 | examples-with-cassandra/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:06:19 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-lucky-filly: logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-lucky-filly/cassandra-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:25 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-lucky-filly/cassandra-1 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:25 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:25 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:25 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:26 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:26 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:29 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-gzdwz Binding Scheduled Successfully 
logger.go:42: 07:06:19 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-lucky-filly:
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-lucky-filly/cassandra-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:24 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:25 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-lucky-filly/cassandra-1 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:25 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:25 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:25 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:26 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:26 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:29 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-gzdwz Binding Scheduled Successfully assigned kuttl-test-lucky-filly/with-cassandra-cassandra-schema-job-gzdwz to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:29 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-gzdwz job-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:30 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-gzdwz AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:30 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-gzdwz.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.47.0" kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:35 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-gzdwz.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.47.0" in 4.795s (4.795s including waiting) kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:35 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-gzdwz.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:35 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-gzdwz.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:42 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82 Binding Scheduled Successfully assigned kuttl-test-lucky-filly/with-cassandra-6dd79b777-t2w82 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82 AddedInterface Add eth0 [10.128.2.58/23] from ovn-kubernetes
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal ReplicaSet.apps with-cassandra-6dd79b777 SuccessfulCreate Created pod: with-cassandra-6dd79b777-t2w82 replicaset-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:43 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-6dd79b777 to 1 deployment-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:48 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:48 +0000 UTC Normal Pod with-cassandra-6dd79b777-t2w82.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:48 +0000 UTC Normal ReplicaSet.apps with-cassandra-6dd79b777 SuccessfulDelete Deleted pod: with-cassandra-6dd79b777-t2w82 replicaset-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:48 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-6dd79b777 to 0 from 1 deployment-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:49 +0000 UTC Normal Pod with-cassandra-6fbdccdfc-kh6sw Binding Scheduled Successfully assigned kuttl-test-lucky-filly/with-cassandra-6fbdccdfc-kh6sw to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:49 +0000 UTC Normal ReplicaSet.apps with-cassandra-6fbdccdfc SuccessfulCreate Created pod: with-cassandra-6fbdccdfc-kh6sw replicaset-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:49 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-6fbdccdfc to 1 deployment-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:50 +0000 UTC Normal Pod with-cassandra-6fbdccdfc-kh6sw AddedInterface Add eth0 [10.128.2.59/23] from ovn-kubernetes
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:50 +0000 UTC Normal Pod with-cassandra-6fbdccdfc-kh6sw.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:50 +0000 UTC Normal Pod with-cassandra-6fbdccdfc-kh6sw.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:50 +0000 UTC Normal Pod with-cassandra-6fbdccdfc-kh6sw.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:50 +0000 UTC Normal Pod with-cassandra-6fbdccdfc-kh6sw.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:50 +0000 UTC Normal Pod with-cassandra-6fbdccdfc-kh6sw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:50 +0000 UTC Normal Pod with-cassandra-6fbdccdfc-kh6sw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:05:52 +0000 UTC Warning Pod with-cassandra-6fbdccdfc-kh6sw.spec.containers{jaeger} BackOff Back-off restarting failed container jaeger in pod with-cassandra-6fbdccdfc-kh6sw_kuttl-test-lucky-filly(ac9e7bf0-a971-4859-924a-32a7026d4a6c) kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:07 +0000 UTC Normal Pod check-span-t2fps Binding Scheduled Successfully assigned kuttl-test-lucky-filly/check-span-t2fps to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:07 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-t2fps job-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:07 +0000 UTC Normal Pod report-span-vcrkj Binding Scheduled Successfully assigned kuttl-test-lucky-filly/report-span-vcrkj to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:07 +0000 UTC Normal Pod report-span-vcrkj AddedInterface Add eth0 [10.129.2.29/23] from ovn-kubernetes
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:07 +0000 UTC Normal Pod report-span-vcrkj.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:07 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-vcrkj job-controller
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:08 +0000 UTC Normal Pod check-span-t2fps AddedInterface Add eth0 [10.129.2.30/23] from ovn-kubernetes
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:08 +0000 UTC Normal Pod check-span-t2fps.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:08 +0000 UTC Normal Pod check-span-t2fps.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:08 +0000 UTC Normal Pod check-span-t2fps.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:08 +0000 UTC Normal Pod report-span-vcrkj.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:08 +0000 UTC Normal Pod report-span-vcrkj.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:06:19 | examples-with-cassandra | 2023-12-04 07:06:19 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:06:19 | examples-with-cassandra | Deleting namespace: kuttl-test-lucky-filly
=== CONT kuttl/harness/examples-with-badger-and-volume
logger.go:42: 07:06:32 | examples-with-badger-and-volume | Creating namespace: kuttl-test-above-louse
logger.go:42: 07:06:32 | examples-with-badger-and-volume/0-install | starting test step 0-install
logger.go:42: 07:06:32 | examples-with-badger-and-volume/0-install | Jaeger:kuttl-test-above-louse/with-badger-and-volume created
logger.go:42: 07:06:37 | examples-with-badger-and-volume/0-install | test step completed 0-install
logger.go:42: 07:06:37 | examples-with-badger-and-volume/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:06:37 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger-and-volume /dev/null]
logger.go:42: 07:06:39 | examples-with-badger-and-volume/1-smoke-test | Warning: resource jaegers/with-badger-and-volume is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:06:45 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:06:46 | examples-with-badger-and-volume/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:06:46 | examples-with-badger-and-volume/1-smoke-test | job.batch/report-span created
logger.go:42: 07:06:46 | examples-with-badger-and-volume/1-smoke-test | job.batch/check-span created
logger.go:42: 07:16:47 | examples-with-badger-and-volume/1-smoke-test | test step failed 1-smoke-test
case.go:364: failed in step 1-smoke-test
case.go:366: --- Job:kuttl-test-above-louse/check-span
+++ Job:kuttl-test-above-louse/check-span
@@ -1,8 +1,141 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
+  annotations:
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"check-span","namespace":"kuttl-test-above-louse"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./query"],"env":[{"name":"SERVICE_NAME","value":"smoke-test-service"},{"name":"QUERY_HOST","value":"https://with-badger-and-volume-query:443"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: 2324c924-ffa1-417a-ad4c-e54f59b23e91
+    batch.kubernetes.io/job-name: check-span
+    controller-uid: 2324c924-ffa1-417a-ad4c-e54f59b23e91
+    job-name: check-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:parallelism: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"QUERY_HOST"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2023-12-04T07:06:46Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2023-12-04T07:15:05Z"
   name: check-span
   namespace: kuttl-test-above-louse
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  parallelism: 1
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: 2324c924-ffa1-417a-ad4c-e54f59b23e91
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: 2324c924-ffa1-417a-ad4c-e54f59b23e91
+        batch.kubernetes.io/job-name: check-span
+        controller-uid: 2324c924-ffa1-417a-ad4c-e54f59b23e91
+        job-name: check-span
+    spec:
+      containers:
+      - command:
+        - ./query
+        env:
+        - name: SERVICE_NAME
+          value: smoke-test-service
+        - name: QUERY_HOST
+          value: https://with-badger-and-volume-query:443
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3
+        imagePullPolicy: IfNotPresent
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
 status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2023-12-04T07:06:46Z"
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-above-louse/check-span: .status.succeeded: key is missing from map
logger.go:42: 07:16:47 | examples-with-badger-and-volume | examples-with-badger-and-volume events from ns kuttl-test-above-louse:
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal Pod with-badger-and-volume-cb4dddffb-rrrwc Binding Scheduled Successfully assigned kuttl-test-above-louse/with-badger-and-volume-cb4dddffb-rrrwc to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal Pod with-badger-and-volume-cb4dddffb-rrrwc AddedInterface Add eth0 [10.128.2.60/23] from ovn-kubernetes
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal Pod with-badger-and-volume-cb4dddffb-rrrwc.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal Pod with-badger-and-volume-cb4dddffb-rrrwc.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal Pod with-badger-and-volume-cb4dddffb-rrrwc.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal Pod with-badger-and-volume-cb4dddffb-rrrwc.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal Pod with-badger-and-volume-cb4dddffb-rrrwc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal Pod with-badger-and-volume-cb4dddffb-rrrwc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal ReplicaSet.apps with-badger-and-volume-cb4dddffb SuccessfulCreate Created pod: with-badger-and-volume-cb4dddffb-rrrwc replicaset-controller
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:36 +0000 UTC Normal Deployment.apps with-badger-and-volume ScalingReplicaSet Scaled up replica set with-badger-and-volume-cb4dddffb to 1 deployment-controller
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Pod check-span-r5k9d Binding Scheduled Successfully assigned kuttl-test-above-louse/check-span-r5k9d to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Pod check-span-r5k9d AddedInterface Add eth0 [10.131.0.56/23] from ovn-kubernetes
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Pod check-span-r5k9d.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-r5k9d job-controller
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Pod report-span-h5nml Binding Scheduled Successfully assigned kuttl-test-above-louse/report-span-h5nml to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Pod report-span-h5nml AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Pod report-span-h5nml.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Pod report-span-h5nml.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Pod report-span-h5nml.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:46 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-h5nml job-controller
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:47 +0000 UTC Normal Pod check-span-r5k9d.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:06:47 +0000 UTC Normal Pod check-span-r5k9d.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | 2023-12-04 07:11:52 +0000 UTC Warning Pod check-span-r5k9d.spec.containers{asserts-container} BackOff Back-off restarting failed container asserts-container in pod check-span-r5k9d_kuttl-test-above-louse(b6d3a6db-98dd-446f-b721-77183b646312) kubelet
logger.go:42: 07:16:47 | examples-with-badger-and-volume | Deleting namespace: kuttl-test-above-louse
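The `.status.succeeded: key is missing from map` failure above is kuttl reporting that its assert file never matched: the expected Job carries `status.succeeded: 1` (the `-` line in the diff), while the live check-span Job only ever reported `active`/`ready` because its pod kept crash-looping. A sketch of the assert that would produce exactly this comparison; the file name is an assumption, the expected fields are read off the diff:

    # e.g. 01-assert.yaml (name assumed) - kuttl polls until the live
    # object matches, then fails the step at the timeout if it never does
    apiVersion: batch/v1
    kind: Job
    metadata:
      name: check-span
    status:
      succeeded: 1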
=== CONT kuttl/harness/examples-with-badger
logger.go:42: 07:17:01 | examples-with-badger | Creating namespace: kuttl-test-driven-moose
logger.go:42: 07:17:02 | examples-with-badger/0-install | starting test step 0-install
logger.go:42: 07:17:02 | examples-with-badger/0-install | Jaeger:kuttl-test-driven-moose/with-badger created
logger.go:42: 07:17:08 | examples-with-badger/0-install | test step completed 0-install
logger.go:42: 07:17:08 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:17:08 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null]
logger.go:42: 07:17:09 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:17:16 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:17:16 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:17:17 | examples-with-badger/1-smoke-test | job.batch/report-span created
logger.go:42: 07:17:17 | examples-with-badger/1-smoke-test | job.batch/check-span created
logger.go:42: 07:17:28 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:17:28 | examples-with-badger | examples-with-badger events from ns kuttl-test-driven-moose:
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:05 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr Binding Scheduled Successfully assigned kuttl-test-driven-moose/with-badger-5fbf6d8c44-h8hcr to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:05 +0000 UTC Normal ReplicaSet.apps with-badger-5fbf6d8c44 SuccessfulCreate Created pod: with-badger-5fbf6d8c44-h8hcr replicaset-controller
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:05 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-5fbf6d8c44 to 1 deployment-controller
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:06 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr AddedInterface Add eth0 [10.128.2.62/23] from ovn-kubernetes
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:06 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:06 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:06 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:06 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:06 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:06 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:12 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:12 +0000 UTC Normal Pod with-badger-5fbf6d8c44-h8hcr.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:12 +0000 UTC Normal ReplicaSet.apps with-badger-5fbf6d8c44 SuccessfulDelete Deleted pod: with-badger-5fbf6d8c44-h8hcr replicaset-controller
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:12 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-5fbf6d8c44 to 0 from 1 deployment-controller
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal Pod with-badger-8565bc986c-6hgp6 Binding Scheduled Successfully assigned kuttl-test-driven-moose/with-badger-8565bc986c-6hgp6 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal Pod with-badger-8565bc986c-6hgp6 AddedInterface Add eth0 [10.128.2.63/23] from ovn-kubernetes
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal Pod with-badger-8565bc986c-6hgp6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal Pod with-badger-8565bc986c-6hgp6.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal Pod with-badger-8565bc986c-6hgp6.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal Pod with-badger-8565bc986c-6hgp6.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal Pod with-badger-8565bc986c-6hgp6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal Pod with-badger-8565bc986c-6hgp6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal ReplicaSet.apps with-badger-8565bc986c SuccessfulCreate Created pod: with-badger-8565bc986c-6hgp6 replicaset-controller
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:13 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-8565bc986c to 1 deployment-controller
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod check-span-kgmjj Binding Scheduled Successfully assigned kuttl-test-driven-moose/check-span-kgmjj to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod check-span-kgmjj AddedInterface Add eth0 [10.131.0.57/23] from ovn-kubernetes
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod check-span-kgmjj.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod check-span-kgmjj.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod check-span-kgmjj.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-kgmjj job-controller
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod report-span-j9r79 Binding Scheduled Successfully assigned kuttl-test-driven-moose/report-span-j9r79 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod report-span-j9r79 AddedInterface Add eth0 [10.129.2.32/23] from ovn-kubernetes
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod report-span-j9r79.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod report-span-j9r79.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Pod report-span-j9r79.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:17 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-j9r79 job-controller
logger.go:42: 07:17:28 | examples-with-badger | 2023-12-04 07:17:27 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:17:28 | examples-with-badger | Deleting namespace: kuttl-test-driven-moose
=== CONT kuttl/harness/examples-simplest
logger.go:42: 07:17:41 | examples-simplest | Creating namespace: kuttl-test-renewed-aardvark
logger.go:42: 07:17:41 | examples-simplest/0-install | starting test step 0-install
logger.go:42: 07:17:41 | examples-simplest/0-install | Jaeger:kuttl-test-renewed-aardvark/simplest created
logger.go:42: 07:17:48 | examples-simplest/0-install | test step completed 0-install
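examples-simplest installs the smallest possible Jaeger resource; the operator then defaults to the all-in-one strategy with in-memory storage, which matches the single `simplest-*` pod in the events below. The upstream example is essentially just a named CR with an empty spec:

    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: simplest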
logger.go:42: 07:17:48 | examples-simplest/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:17:48 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 07:17:49 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:17:56 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:17:57 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:17:57 | examples-simplest/1-smoke-test | job.batch/report-span created
logger.go:42: 07:17:57 | examples-simplest/1-smoke-test | job.batch/check-span created
logger.go:42: 07:18:09 | examples-simplest/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:18:09 | examples-simplest | examples-simplest events from ns kuttl-test-renewed-aardvark:
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:44 +0000 UTC Normal Pod simplest-78c7979974-42s9h Binding Scheduled Successfully assigned kuttl-test-renewed-aardvark/simplest-78c7979974-42s9h to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:44 +0000 UTC Normal ReplicaSet.apps simplest-78c7979974 SuccessfulCreate Created pod: simplest-78c7979974-42s9h replicaset-controller
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:44 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-78c7979974 to 1 deployment-controller
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:45 +0000 UTC Normal Pod simplest-78c7979974-42s9h AddedInterface Add eth0 [10.128.2.64/23] from ovn-kubernetes
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:45 +0000 UTC Normal Pod simplest-78c7979974-42s9h.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:45 +0000 UTC Normal Pod simplest-78c7979974-42s9h.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:45 +0000 UTC Normal Pod simplest-78c7979974-42s9h.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:45 +0000 UTC Normal Pod simplest-78c7979974-42s9h.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:45 +0000 UTC Normal Pod simplest-78c7979974-42s9h.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:45 +0000 UTC Normal Pod simplest-78c7979974-42s9h.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:51 +0000 UTC Normal Pod simplest-78c7979974-42s9h.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:51 +0000 UTC Normal Pod simplest-78c7979974-42s9h.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:51 +0000 UTC Normal ReplicaSet.apps simplest-78c7979974 SuccessfulDelete Deleted pod: simplest-78c7979974-42s9h replicaset-controller
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:51 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-78c7979974 to 0 from 1 deployment-controller
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal Pod simplest-7bd754bbb4-z68wf Binding Scheduled Successfully assigned kuttl-test-renewed-aardvark/simplest-7bd754bbb4-z68wf to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal Pod simplest-7bd754bbb4-z68wf AddedInterface Add eth0 [10.128.2.65/23] from ovn-kubernetes
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal Pod simplest-7bd754bbb4-z68wf.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal Pod simplest-7bd754bbb4-z68wf.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal Pod simplest-7bd754bbb4-z68wf.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal Pod simplest-7bd754bbb4-z68wf.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal Pod simplest-7bd754bbb4-z68wf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal Pod simplest-7bd754bbb4-z68wf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal ReplicaSet.apps simplest-7bd754bbb4 SuccessfulCreate Created pod: simplest-7bd754bbb4-z68wf replicaset-controller
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:52 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-7bd754bbb4 to 1 deployment-controller
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:57 +0000 UTC Normal Pod check-span-972ld Binding Scheduled Successfully assigned kuttl-test-renewed-aardvark/check-span-972ld to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:57 +0000 UTC Normal Pod check-span-972ld AddedInterface Add eth0 [10.131.0.58/23] from ovn-kubernetes
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:57 +0000 UTC Normal Pod check-span-972ld.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:57 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-972ld job-controller
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:57 +0000 UTC Normal Pod report-span-g5xlj Binding Scheduled Successfully assigned kuttl-test-renewed-aardvark/report-span-g5xlj to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:57 +0000 UTC Normal Pod report-span-g5xlj AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:57 +0000 UTC Normal Pod report-span-g5xlj.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:57 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-g5xlj job-controller
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:58 +0000 UTC Normal Pod check-span-972ld.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:58 +0000 UTC Normal Pod check-span-972ld.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:58 +0000 UTC Normal Pod report-span-g5xlj.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:17:58 +0000 UTC Normal Pod report-span-g5xlj.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:18:09 | examples-simplest | 2023-12-04 07:18:09 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:18:09 | examples-simplest | Deleting namespace: kuttl-test-renewed-aardvark
=== CONT kuttl/harness/examples-simple-prod-with-volumes
logger.go:42: 07:18:22 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 07:18:22 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-liberal-bengal
logger.go:42: 07:18:22 | examples-simple-prod-with-volumes/1-install | starting test step 1-install
logger.go:42: 07:18:22 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-liberal-bengal/simple-prod created
logger.go:42: 07:18:59 | examples-simple-prod-with-volumes/1-install | test step completed 1-install
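Unlike the all-in-one examples above, simple-prod uses the production strategy: the operator provisions Elasticsearch (the elasticsearch-cdm-* pods in the events below) plus separate collector and query deployments. A sketch of the CR's likely shape; `strategy` and `storage.type` follow from the deployments visible in the events, while the extra volume is an assumption based on the test's name and the `ls /usr/share/elasticsearch/data` check in step 3:

    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: simple-prod
    spec:
      strategy: production
      storage:
        type: elasticsearch
      # assumed volume wiring exercised by the 3-check-volume step
      volumeMounts:
      - name: test-volume                # name is hypothetical
        mountPath: /usr/share/elasticsearch/data
      volumes:
      - name: test-volume
        emptyDir: {}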
logger.go:42: 07:18:59 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 07:18:59 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 07:19:00 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:19:08 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:19:09 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:19:09 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created
logger.go:42: 07:19:09 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created
logger.go:42: 07:19:20 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 07:19:20 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume
logger.go:42: 07:19:20 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data]
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume
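kuttl logs each command it runs through an `sh -c` wrapper, which suggests the rendered 03-check-volume step is a TestStep with a `script` entry. A plausible reconstruction of the rendered file; the TestStep wrapper is an assumption, the script itself is verbatim from the log line above:

    apiVersion: kuttl.dev/v1beta1
    kind: TestStep
    commands:
    # resolve the collector pod name with yq, then list the mounted
    # directory; the step passes only if `ls` exits 0 inside the pod
    - script: kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data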
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-liberal-bengal:
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:28 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f94758d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8 replicaset-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:28 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8 Binding Scheduled Successfully assigned kuttl-test-liberal-bengal/elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:28 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f94758d to 1 deployment-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8 AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:29 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:39 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:44 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestliberalbengalsimpleprod-1-548f9txmc8.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:55 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-kmrgp Binding Scheduled Successfully assigned kuttl-test-liberal-bengal/simple-prod-collector-55ff468b9d-kmrgp to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:55 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-55ff468b9d SuccessfulCreate Created pod: simple-prod-collector-55ff468b9d-kmrgp replicaset-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:55 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-55ff468b9d to 1 deployment-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:55 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct Binding Scheduled Successfully assigned kuttl-test-liberal-bengal/simple-prod-query-7b5b46c878-2rwct to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:55 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b5b46c878 SuccessfulCreate Created pod: simple-prod-query-7b5b46c878-2rwct replicaset-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:55 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7b5b46c878 to 1 deployment-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-kmrgp AddedInterface Add eth0 [10.128.2.66/23] from ovn-kubernetes
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-kmrgp.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-kmrgp.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-collector-55ff468b9d-kmrgp.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct AddedInterface Add eth0 [10.128.2.67/23] from ovn-kubernetes
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:18:56 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:04 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:04 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:04 +0000 UTC Normal Pod simple-prod-query-7b5b46c878-2rwct.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:04 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7b5b46c878 SuccessfulDelete Deleted pod: simple-prod-query-7b5b46c878-2rwct replicaset-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:04 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-7b5b46c878 to 0 from 1 deployment-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:05 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs Binding Scheduled Successfully assigned kuttl-test-liberal-bengal/simple-prod-query-78bd879d87-42szs to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:05 +0000 UTC Normal ReplicaSet.apps simple-prod-query-78bd879d87 SuccessfulCreate Created pod: simple-prod-query-78bd879d87-42szs replicaset-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:05 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-78bd879d87 to 1 deployment-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs AddedInterface Add eth0 [10.128.2.68/23] from ovn-kubernetes
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:06 +0000 UTC Normal Pod simple-prod-query-78bd879d87-42szs.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:09 +0000 UTC Normal Pod check-span-gh4c9 Binding Scheduled Successfully assigned kuttl-test-liberal-bengal/check-span-gh4c9 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:09 +0000 UTC Normal Pod check-span-gh4c9 AddedInterface Add eth0 [10.131.0.60/23] from ovn-kubernetes
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:09 +0000 UTC Normal Pod check-span-gh4c9.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:09 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-gh4c9 job-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:09 +0000 UTC Normal Pod report-span-mzvrz Binding Scheduled Successfully assigned kuttl-test-liberal-bengal/report-span-mzvrz to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:09 +0000 UTC Normal Pod report-span-mzvrz AddedInterface Add eth0 [10.131.0.59/23] from ovn-kubernetes
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:09 +0000 UTC Normal Pod report-span-mzvrz.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:09 +0000 UTC Normal Pod report-span-mzvrz.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:09 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-mzvrz job-controller
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:10 +0000 UTC Normal Pod check-span-gh4c9.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:10 +0000 UTC Normal Pod check-span-gh4c9.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:10 +0000 UTC Normal Pod report-span-mzvrz.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:10 +0000
UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | 2023-12-04 07:19:20 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:19:21 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-liberal-bengal === CONT kuttl/harness/examples-simple-prod logger.go:42: 07:19:33 | examples-simple-prod | Creating namespace: kuttl-test-engaging-locust logger.go:42: 07:19:34 | examples-simple-prod/1-install | starting test step 1-install logger.go:42: 07:19:34 | examples-simple-prod/1-install | Jaeger:kuttl-test-engaging-locust/simple-prod created logger.go:42: 07:20:10 | examples-simple-prod/1-install | test step completed 1-install logger.go:42: 07:20:10 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:20:10 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null] logger.go:42: 07:20:11 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
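The FailedGetResourceMetric / FailedComputeMetricsReplicas warnings recorded above are emitted by the collector's HorizontalPodAutoscaler while it polls the resource metrics API before the metrics pipeline has produced samples for the freshly started pods; they are transient and do not fail the test step. A quick manual way to check when pod metrics become available (a sketch, not part of the test suite):

  # Empty or erroring until metrics-server has scraped the new pods
  kubectl top pods -n $NAMESPACE
  # The raw resource metrics API endpoint the HPA controller queries
  kubectl get --raw "/apis/metrics.k8s.io/v1beta1/namespaces/$NAMESPACE/pods"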
logger.go:42: 07:20:17 | examples-simple-prod/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:20:18 | examples-simple-prod/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:20:18 | examples-simple-prod/2-smoke-test | job.batch/report-span created logger.go:42: 07:20:18 | examples-simple-prod/2-smoke-test | job.batch/check-span created logger.go:42: 07:20:31 | examples-simple-prod/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:20:31 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-engaging-locust: logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:40 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc74ffcd SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x replicaset-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x Binding Scheduled Successfully assigned kuttl-test-engaging-locust/elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x AddedInterface Add eth0 [10.129.2.35/23] from ovn-kubernetes logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:40 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc74ffcd to 1 deployment-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x.spec.containers{proxy} Created Created container proxy 
kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:50 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:19:56 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestengaginglocustsimpleprod-1-6dc75kk8x.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-szwgx Binding Scheduled Successfully assigned kuttl-test-engaging-locust/simple-prod-collector-77fcbdc546-szwgx to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-szwgx AddedInterface Add eth0 [10.128.2.69/23] from ovn-kubernetes logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-szwgx.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-szwgx.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal Pod simple-prod-collector-77fcbdc546-szwgx.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-77fcbdc546 SuccessfulCreate Created pod: simple-prod-collector-77fcbdc546-szwgx replicaset-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-77fcbdc546 to 1 deployment-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb Binding Scheduled Successfully assigned kuttl-test-engaging-locust/simple-prod-query-55fbcc7b55-ntjfb to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Warning Pod simple-prod-query-55fbcc7b55-ntjfb FailedMount MountVolume.SetUp failed for volume "simple-prod-ui-oauth-proxy-tls" : secret "simple-prod-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal ReplicaSet.apps simple-prod-query-55fbcc7b55 SuccessfulCreate Created pod: simple-prod-query-55fbcc7b55-ntjfb replicaset-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:07 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-55fbcc7b55 to 1 deployment-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod 
simple-prod-query-55fbcc7b55-ntjfb AddedInterface Add eth0 [10.128.2.70/23] from ovn-kubernetes logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:08 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:12 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:12 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:12 +0000 UTC Normal Pod simple-prod-query-55fbcc7b55-ntjfb.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:12 +0000 UTC Normal ReplicaSet.apps simple-prod-query-55fbcc7b55 SuccessfulDelete Deleted pod: simple-prod-query-55fbcc7b55-ntjfb replicaset-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:12 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-55fbcc7b55 to 0 from 1 deployment-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:13 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v Binding Scheduled Successfully assigned 
kuttl-test-engaging-locust/simple-prod-query-57847bfd56-n2n5v to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:13 +0000 UTC Normal ReplicaSet.apps simple-prod-query-57847bfd56 SuccessfulCreate Created pod: simple-prod-query-57847bfd56-n2n5v replicaset-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:13 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-57847bfd56 to 1 deployment-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v AddedInterface Add eth0 [10.128.2.71/23] from ovn-kubernetes logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:14 +0000 UTC Normal Pod simple-prod-query-57847bfd56-n2n5v.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:18 +0000 UTC Normal Pod check-span-jgsnf Binding Scheduled Successfully assigned kuttl-test-engaging-locust/check-span-jgsnf to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:18 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-jgsnf job-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:18 +0000 UTC Normal Pod report-span-25pvq Binding Scheduled Successfully assigned kuttl-test-engaging-locust/report-span-25pvq to 
ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:18 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-25pvq job-controller logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:19 +0000 UTC Normal Pod check-span-jgsnf AddedInterface Add eth0 [10.131.0.62/23] from ovn-kubernetes logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:19 +0000 UTC Normal Pod check-span-jgsnf.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:19 +0000 UTC Normal Pod check-span-jgsnf.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:19 +0000 UTC Normal Pod check-span-jgsnf.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:19 +0000 UTC Normal Pod report-span-25pvq AddedInterface Add eth0 [10.131.0.61/23] from ovn-kubernetes logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:19 +0000 UTC Normal Pod report-span-25pvq.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:19 +0000 UTC Normal Pod report-span-25pvq.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:19 +0000 UTC Normal Pod report-span-25pvq.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:22 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:20:31 | examples-simple-prod | 2023-12-04 07:20:30 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:20:31 | examples-simple-prod | Deleting namespace: kuttl-test-engaging-locust === CONT kuttl/harness/examples-business-application-injected-sidecar logger.go:42: 07:20:37 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-equal-duck logger.go:42: 07:20:37 | examples-business-application-injected-sidecar/0-install | starting test step 0-install 
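The 0-install step creates a plain application Deployment and relies on the Jaeger Operator to inject the agent sidecar afterwards; injection is requested with an annotation on the Deployment. A minimal sketch of such a manifest (the image name appears in the events below; replica count, labels, and port are assumptions):

  kubectl apply -n $NAMESPACE -f - <<'EOF'
  apiVersion: apps/v1
  kind: Deployment
  metadata:
    name: myapp
    annotations:
      sidecar.jaegertracing.io/inject: "true"   # ask the Jaeger Operator to add a jaeger-agent sidecar
  spec:
    replicas: 1
    selector:
      matchLabels:
        app: myapp
    template:
      metadata:
        labels:
          app: myapp
      spec:
        containers:
        - name: myapp
          image: jaegertracing/vertx-create-span:operator-e2e-tests
          ports:
          - containerPort: 8080   # probed by the liveness checks seen later in the events
  EOF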
logger.go:42: 07:20:37 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-equal-duck/myapp created logger.go:42: 07:20:37 | examples-business-application-injected-sidecar/0-install | test step completed 0-install logger.go:42: 07:20:37 | examples-business-application-injected-sidecar/1-install | starting test step 1-install logger.go:42: 07:20:38 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-equal-duck/simplest created logger.go:42: 07:20:49 | examples-business-application-injected-sidecar/1-install | test step completed 1-install logger.go:42: 07:20:49 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:20:49 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null] logger.go:42: 07:20:50 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:20:56 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:20:57 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:20:57 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created logger.go:42: 07:20:57 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created logger.go:42: 07:21:10 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-equal-duck: logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:37 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-679f79d5f8 to 1 deployment-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:38 +0000 UTC Normal Pod myapp-679f79d5f8-pcrgf Binding Scheduled Successfully assigned kuttl-test-equal-duck/myapp-679f79d5f8-pcrgf to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:38 +0000 UTC Normal Pod myapp-679f79d5f8-pcrgf AddedInterface Add eth0 [10.128.2.72/23] from ovn-kubernetes logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:38 +0000 UTC Normal Pod myapp-679f79d5f8-pcrgf.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 
07:20:38 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulCreate Created pod: myapp-679f79d5f8-pcrgf replicaset-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:41 +0000 UTC Normal Pod myapp-5fdbdc8cd5-sjcxw Binding Scheduled Successfully assigned kuttl-test-equal-duck/myapp-5fdbdc8cd5-sjcxw to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:41 +0000 UTC Warning Pod myapp-5fdbdc8cd5-sjcxw FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:41 +0000 UTC Warning Pod myapp-5fdbdc8cd5-sjcxw FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:41 +0000 UTC Normal ReplicaSet.apps myapp-5fdbdc8cd5 SuccessfulCreate Created pod: myapp-5fdbdc8cd5-sjcxw replicaset-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:41 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-5fdbdc8cd5 to 1 deployment-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:42 +0000 UTC Normal Pod myapp-679f79d5f8-pcrgf.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 4.038s (4.038s including waiting) kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:42 +0000 UTC Normal Pod myapp-679f79d5f8-pcrgf.spec.containers{myapp} Created Created container myapp kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:42 +0000 UTC Normal Pod myapp-679f79d5f8-pcrgf.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:46 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv Binding Scheduled Successfully assigned kuttl-test-equal-duck/simplest-5fc9d6d49f-fwxrv to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:46 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv AddedInterface Add eth0 [10.128.2.73/23] from ovn-kubernetes logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:46 +0000 UTC Normal ReplicaSet.apps simplest-5fc9d6d49f SuccessfulCreate Created pod: simplest-5fc9d6d49f-fwxrv replicaset-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:46 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-5fc9d6d49f to 1 deployment-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:47 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:47 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv.spec.containers{jaeger} Created Created container 
jaeger kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:47 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:47 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:47 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:47 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:49 +0000 UTC Normal Pod myapp-5fdbdc8cd5-sjcxw AddedInterface Add eth0 [10.131.0.63/23] from ovn-kubernetes logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:49 +0000 UTC Normal Pod myapp-5fdbdc8cd5-sjcxw.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:49 +0000 UTC Warning Pod myapp-679f79d5f8-pcrgf.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.128.2.72:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:51 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:51 +0000 UTC Normal Pod simplest-5fc9d6d49f-fwxrv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:51 +0000 UTC Normal ReplicaSet.apps simplest-5fc9d6d49f SuccessfulDelete Deleted pod: simplest-5fc9d6d49f-fwxrv replicaset-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:51 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-5fc9d6d49f to 0 from 1 deployment-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:52 +0000 UTC Normal Pod simplest-7975f96756-fvpmq Binding Scheduled Successfully assigned kuttl-test-equal-duck/simplest-7975f96756-fvpmq to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:52 +0000 UTC Normal ReplicaSet.apps simplest-7975f96756 SuccessfulCreate Created pod: simplest-7975f96756-fvpmq replicaset-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:52 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-7975f96756 to 1 deployment-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod myapp-5fdbdc8cd5-sjcxw.spec.containers{myapp} Pulled 
Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.624s (3.624s including waiting) kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod myapp-5fdbdc8cd5-sjcxw.spec.containers{myapp} Created Created container myapp kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod myapp-5fdbdc8cd5-sjcxw.spec.containers{myapp} Started Started container myapp kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod myapp-5fdbdc8cd5-sjcxw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod myapp-5fdbdc8cd5-sjcxw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod myapp-5fdbdc8cd5-sjcxw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod simplest-7975f96756-fvpmq AddedInterface Add eth0 [10.128.2.74/23] from ovn-kubernetes logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod simplest-7975f96756-fvpmq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod simplest-7975f96756-fvpmq.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod simplest-7975f96756-fvpmq.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod simplest-7975f96756-fvpmq.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod simplest-7975f96756-fvpmq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:53 +0000 UTC Normal Pod simplest-7975f96756-fvpmq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:54 +0000 UTC Normal Pod myapp-679f79d5f8-pcrgf.spec.containers{myapp} Killing Stopping container myapp kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:54 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulDelete Deleted pod: myapp-679f79d5f8-pcrgf replicaset-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 
07:20:54 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-679f79d5f8 to 0 from 1 deployment-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:57 +0000 UTC Normal Pod check-span-dlwb9 Binding Scheduled Successfully assigned kuttl-test-equal-duck/check-span-dlwb9 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:57 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-dlwb9 job-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:57 +0000 UTC Normal Pod report-span-fwxjv Binding Scheduled Successfully assigned kuttl-test-equal-duck/report-span-fwxjv to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:57 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-fwxjv job-controller logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:58 +0000 UTC Normal Pod check-span-dlwb9 AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:58 +0000 UTC Normal Pod check-span-dlwb9.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:58 +0000 UTC Normal Pod check-span-dlwb9.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:58 +0000 UTC Normal Pod check-span-dlwb9.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:58 +0000 UTC Normal Pod report-span-fwxjv AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:58 +0000 UTC Normal Pod report-span-fwxjv.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:58 +0000 UTC Normal Pod report-span-fwxjv.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:20:58 +0000 UTC Normal Pod report-span-fwxjv.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:21:00 +0000 UTC Warning Pod myapp-5fdbdc8cd5-sjcxw.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.131.0.63:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:21:10 | examples-business-application-injected-sidecar | 2023-12-04 07:21:09 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:21:10 | 
examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-equal-duck === CONT kuttl/harness/examples-openshift-with-htpasswd logger.go:42: 07:21:16 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:21:16 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:21:16 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-causal-eel logger.go:42: 07:21:16 | examples-openshift-with-htpasswd/0-install | starting test step 0-install logger.go:42: 07:21:17 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-causal-eel/htpasswd created logger.go:42: 07:21:17 | examples-openshift-with-htpasswd/0-install | test step completed 0-install logger.go:42: 07:21:17 | examples-openshift-with-htpasswd/1-install | starting test step 1-install logger.go:42: 07:21:17 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-causal-eel/with-htpasswd created logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/1-install | test step completed 1-install logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh] logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0 logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. 
Printing more information for debugging the template: logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | template was: logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | {.items[0].status.ingress[0].host} logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | object given to jsonpath engine was: logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | logger.go:42: 07:21:23 | examples-openshift-with-htpasswd/2-check-unsecured | logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1 logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-causal-eel.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd] logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-causal-eel.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-causal-eel.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh] logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0 logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-causal-eel.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com logger.go:42: 07:21:33 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd] logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-causal-eel.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-causal-eel.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh] logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0 logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-causal-eel.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd] logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-causal-eel.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. 
Trying insecure mode logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-causal-eel.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly logger.go:42: 07:21:34 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-causal-eel: logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:20 +0000 UTC Normal Pod with-htpasswd-865f987f96-h7rjm Binding Scheduled Successfully assigned kuttl-test-causal-eel/with-htpasswd-865f987f96-h7rjm to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:20 +0000 UTC Warning Pod with-htpasswd-865f987f96-h7rjm FailedMount MountVolume.SetUp failed for volume "with-htpasswd-collector-tls-config-volume" : secret "with-htpasswd-collector-headless-tls" not found kubelet logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:20 +0000 UTC Normal ReplicaSet.apps with-htpasswd-865f987f96 SuccessfulCreate Created pod: with-htpasswd-865f987f96-h7rjm replicaset-controller logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:20 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-865f987f96 to 1 deployment-controller logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:21 +0000 UTC Normal Pod with-htpasswd-865f987f96-h7rjm AddedInterface Add eth0 [10.128.2.75/23] from ovn-kubernetes logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:21 +0000 UTC Normal Pod with-htpasswd-865f987f96-h7rjm.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:21 +0000 UTC Normal Pod with-htpasswd-865f987f96-h7rjm.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:21 +0000 UTC Normal Pod with-htpasswd-865f987f96-h7rjm.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:21 +0000 UTC Normal Pod with-htpasswd-865f987f96-h7rjm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:21 +0000 UTC Normal Pod with-htpasswd-865f987f96-h7rjm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | 2023-12-04 07:21:21 +0000 UTC Normal Pod with-htpasswd-865f987f96-h7rjm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:21:34 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-causal-eel === CONT kuttl/harness/examples-openshift-agent-as-daemonset logger.go:42: 07:21:40 | examples-openshift-agent-as-daemonset | Ignoring README.md as it does not match 
file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:21:40 | examples-openshift-agent-as-daemonset | Creating namespace: kuttl-test-still-garfish logger.go:42: 07:21:40 | examples-openshift-agent-as-daemonset/0-install | starting test step 0-install logger.go:42: 07:21:40 | examples-openshift-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 07:21:40 | examples-openshift-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-still-garfish/jaeger-agent-daemonset created logger.go:42: 07:21:40 | examples-openshift-agent-as-daemonset/0-install | test step completed 0-install logger.go:42: 07:21:40 | examples-openshift-agent-as-daemonset/1-add-policy | starting test step 1-add-policy logger.go:42: 07:21:40 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset] logger.go:42: 07:21:41 | examples-openshift-agent-as-daemonset/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset" logger.go:42: 07:21:41 | examples-openshift-agent-as-daemonset/1-add-policy | running command: [sh -c sleep 5] logger.go:42: 07:21:46 | examples-openshift-agent-as-daemonset/1-add-policy | test step completed 1-add-policy logger.go:42: 07:21:46 | examples-openshift-agent-as-daemonset/2-install | starting test step 2-install logger.go:42: 07:21:46 | examples-openshift-agent-as-daemonset/2-install | Jaeger:kuttl-test-still-garfish/agent-as-daemonset created logger.go:42: 07:21:51 | examples-openshift-agent-as-daemonset/2-install | test step completed 2-install logger.go:42: 07:21:51 | examples-openshift-agent-as-daemonset/3-install | starting test step 3-install logger.go:42: 07:21:51 | examples-openshift-agent-as-daemonset/3-install | Deployment:kuttl-test-still-garfish/vertx-create-span-sidecar created logger.go:42: 07:21:56 | examples-openshift-agent-as-daemonset/3-install | test step completed 3-install logger.go:42: 07:21:56 | examples-openshift-agent-as-daemonset/4-find-service | starting test step 4-find-service logger.go:42: 07:21:56 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 07:21:58 | examples-openshift-agent-as-daemonset/4-find-service | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
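Before the agent can run as a DaemonSet with host ports, its service account has to be bound to the custom SecurityContextConstraints created in 0-install, which is what the oc adm policy add-scc-to-user step above does. The Jaeger CR then selects the per-node agent; a minimal sketch of such a resource (the strategy field is the documented Jaeger Operator API; the serviceAccount wiring is an assumption):

  kubectl apply -n $NAMESPACE -f - <<'EOF'
  apiVersion: jaegertracing.io/v1
  kind: Jaeger
  metadata:
    name: agent-as-daemonset
  spec:
    agent:
      strategy: DaemonSet   # one jaeger-agent pod per node instead of per-pod sidecars
      # assumed: the service account granted the daemonset-with-hostport SCC
      serviceAccount: jaeger-agent-daemonset
  EOF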
logger.go:42: 07:22:04 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c SERVICE_NAME=order ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JOB_NUMBER=00 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o find-service-00-job.yaml]
logger.go:42: 07:22:05 | examples-openshift-agent-as-daemonset/4-find-service | running command: [sh -c kubectl create -f find-service-00-job.yaml -n $NAMESPACE]
logger.go:42: 07:22:05 | examples-openshift-agent-as-daemonset/4-find-service | job.batch/00-find-service created
logger.go:42: 07:22:32 | examples-openshift-agent-as-daemonset/4-find-service | test step completed 4-find-service
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | examples-openshift-agent-as-daemonset events from ns kuttl-test-still-garfish:
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:49 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj Binding Scheduled Successfully assigned kuttl-test-still-garfish/agent-as-daemonset-57694774df-6t8bj to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:49 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-57694774df SuccessfulCreate Created pod: agent-as-daemonset-57694774df-6t8bj replicaset-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:49 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-5bdcs Binding Scheduled Successfully assigned kuttl-test-still-garfish/agent-as-daemonset-agent-daemonset-5bdcs to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:49 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-87wcl Binding Scheduled Successfully assigned kuttl-test-still-garfish/agent-as-daemonset-agent-daemonset-87wcl to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:49 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-smwtf Binding Scheduled Successfully assigned kuttl-test-still-garfish/agent-as-daemonset-agent-daemonset-smwtf to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:49 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-5bdcs daemonset-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:49 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-smwtf daemonset-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:49 +0000 UTC Normal DaemonSet.apps agent-as-daemonset-agent-daemonset SuccessfulCreate Created pod: agent-as-daemonset-agent-daemonset-87wcl daemonset-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:49 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-57694774df to 1 deployment-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj AddedInterface Add eth0 [10.128.2.76/23] from ovn-kubernetes
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-5bdcs AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-5bdcs.spec.containers{jaeger-agent-daemonset} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-87wcl AddedInterface Add eth0 [10.128.2.77/23] from ovn-kubernetes
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-87wcl.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-87wcl.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-87wcl.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-smwtf AddedInterface Add eth0 [10.131.0.64/23] from ovn-kubernetes
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-smwtf.spec.containers{jaeger-agent-daemonset} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-smwtf.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:50 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-smwtf.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-wchw2 Binding Scheduled Successfully assigned kuttl-test-still-garfish/vertx-create-span-sidecar-6c569f6fc6-wchw2 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-wchw2 AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:51 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6c569f6fc6 SuccessfulCreate Created pod: vertx-create-span-sidecar-6c569f6fc6-wchw2 replicaset-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:51 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6c569f6fc6 to 1 deployment-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:52 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-5bdcs.spec.containers{jaeger-agent-daemonset} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" in 1.999s (1.999s including waiting) kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:52 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-5bdcs.spec.containers{jaeger-agent-daemonset} Created Created container jaeger-agent-daemonset kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:52 +0000 UTC Normal Pod agent-as-daemonset-agent-daemonset-5bdcs.spec.containers{jaeger-agent-daemonset} Started Started container jaeger-agent-daemonset kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:55 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.897s (3.897s including waiting) kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:55 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:55 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:59 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:59 +0000 UTC Normal Pod agent-as-daemonset-57694774df-6t8bj.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:59 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-57694774df SuccessfulDelete Deleted pod: agent-as-daemonset-57694774df-6t8bj replicaset-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:21:59 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-57694774df to 0 from 1 deployment-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal Pod agent-as-daemonset-77c4bcd7cf-n57fr Binding Scheduled Successfully assigned kuttl-test-still-garfish/agent-as-daemonset-77c4bcd7cf-n57fr to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal Pod agent-as-daemonset-77c4bcd7cf-n57fr AddedInterface Add eth0 [10.128.2.78/23] from ovn-kubernetes
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal Pod agent-as-daemonset-77c4bcd7cf-n57fr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal Pod agent-as-daemonset-77c4bcd7cf-n57fr.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal Pod agent-as-daemonset-77c4bcd7cf-n57fr.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal Pod agent-as-daemonset-77c4bcd7cf-n57fr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal Pod agent-as-daemonset-77c4bcd7cf-n57fr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal Pod agent-as-daemonset-77c4bcd7cf-n57fr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-77c4bcd7cf SuccessfulCreate Created pod: agent-as-daemonset-77c4bcd7cf-n57fr replicaset-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:00 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-77c4bcd7cf to 1 deployment-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:03 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.39:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:03 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.39:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:05 +0000 UTC Normal Pod 00-find-service-sgtsh Binding Scheduled Successfully assigned kuttl-test-still-garfish/00-find-service-sgtsh to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:05 +0000 UTC Normal Pod 00-find-service-sgtsh AddedInterface Add eth0 [10.131.0.65/23] from ovn-kubernetes
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:05 +0000 UTC Normal Pod 00-find-service-sgtsh.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:05 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-sgtsh job-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:06 +0000 UTC Normal Pod 00-find-service-sgtsh.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:06 +0000 UTC Normal Pod 00-find-service-sgtsh.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:06 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:06 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.39:8080/": read tcp 10.129.2.2:40630->10.129.2.39:8080: read: connection reset by peer kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:06 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.39:8080/": dial tcp 10.129.2.39:8080: connect: connection refused kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:06 +0000 UTC Normal Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:17 +0000 UTC Warning Pod vertx-create-span-sidecar-6c569f6fc6-wchw2.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.39:8080/": read tcp 10.129.2.2:51112->10.129.2.39:8080: read: connection reset by peer kubelet
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | 2023-12-04 07:22:32 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller
logger.go:42: 07:22:33 | examples-openshift-agent-as-daemonset | Deleting namespace: kuttl-test-still-garfish
=== CONT kuttl/harness/examples-collector-with-priority-class
logger.go:42: 07:22:39 | examples-collector-with-priority-class | Creating namespace: kuttl-test-solid-grubworm
logger.go:42: 07:22:40 | examples-collector-with-priority-class/0-install | starting test step 0-install
logger.go:42: 07:22:40 | examples-collector-with-priority-class/0-install | PriorityClass:/collector-high-priority created
logger.go:42: 07:22:40 | examples-collector-with-priority-class/0-install | Jaeger:kuttl-test-solid-grubworm/collector-with-high-priority created
logger.go:42: 07:22:45 | examples-collector-with-priority-class/0-install | test step completed 0-install
logger.go:42: 07:22:45 | examples-collector-with-priority-class/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:22:45 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE collector-with-high-priority /dev/null]
logger.go:42: 07:22:46 | examples-collector-with-priority-class/1-smoke-test | Warning: resource jaegers/collector-with-high-priority is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
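Step 0-install above pairs a cluster-scoped PriorityClass with a Jaeger CR that references it; note the bare "PriorityClass:/collector-high-priority" in the log, with no namespace before the slash, versus the namespaced Jaeger CR. A hypothetical reconstruction of the two manifests follows; only the resource kinds and names come from the log, and every field value below is an assumption:

    kubectl apply -f - <<'EOF'
    apiVersion: scheduling.k8s.io/v1
    kind: PriorityClass
    metadata:
      name: collector-high-priority
    value: 1000000                      # assumed value, not shown in the log
    description: High priority for the Jaeger collector (assumed)
    ---
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: collector-with-high-priority
    spec:
      collector:
        priorityClassName: collector-high-priority   # assumed wiring
    EOF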
logger.go:42: 07:22:53 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:22:53 | examples-collector-with-priority-class/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:22:53 | examples-collector-with-priority-class/1-smoke-test | job.batch/report-span created
logger.go:42: 07:22:53 | examples-collector-with-priority-class/1-smoke-test | job.batch/check-span created
logger.go:42: 07:23:05 | examples-collector-with-priority-class/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:23:05 | examples-collector-with-priority-class | examples-collector-with-priority-class events from ns kuttl-test-solid-grubworm:
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:43 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x Binding Scheduled Successfully assigned kuttl-test-solid-grubworm/collector-with-high-priority-66f694d64c-6b82x to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:43 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-66f694d64c SuccessfulCreate Created pod: collector-with-high-priority-66f694d64c-6b82x replicaset-controller
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:43 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-66f694d64c to 1 deployment-controller
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:44 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x AddedInterface Add eth0 [10.128.2.79/23] from ovn-kubernetes
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:44 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:44 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:44 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:44 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:44 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:44 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:48 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:48 +0000 UTC Normal Pod collector-with-high-priority-66f694d64c-6b82x.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:48 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-66f694d64c SuccessfulDelete Deleted pod: collector-with-high-priority-66f694d64c-6b82x replicaset-controller
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:48 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled down replica set collector-with-high-priority-66f694d64c to 0 from 1 deployment-controller
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal Pod collector-with-high-priority-86cb56cdd5-p9rsr Binding Scheduled Successfully assigned kuttl-test-solid-grubworm/collector-with-high-priority-86cb56cdd5-p9rsr to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal Pod collector-with-high-priority-86cb56cdd5-p9rsr AddedInterface Add eth0 [10.128.2.80/23] from ovn-kubernetes
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal Pod collector-with-high-priority-86cb56cdd5-p9rsr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal Pod collector-with-high-priority-86cb56cdd5-p9rsr.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal Pod collector-with-high-priority-86cb56cdd5-p9rsr.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal Pod collector-with-high-priority-86cb56cdd5-p9rsr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal Pod collector-with-high-priority-86cb56cdd5-p9rsr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal Pod collector-with-high-priority-86cb56cdd5-p9rsr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal ReplicaSet.apps collector-with-high-priority-86cb56cdd5 SuccessfulCreate Created pod: collector-with-high-priority-86cb56cdd5-p9rsr replicaset-controller
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:49 +0000 UTC Normal Deployment.apps collector-with-high-priority ScalingReplicaSet Scaled up replica set collector-with-high-priority-86cb56cdd5 to 1 deployment-controller
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:53 +0000 UTC Normal Pod check-span-8w67g Binding Scheduled Successfully assigned kuttl-test-solid-grubworm/check-span-8w67g to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:53 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-8w67g job-controller
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:53 +0000 UTC Normal Pod report-span-zvqjv Binding Scheduled Successfully assigned kuttl-test-solid-grubworm/report-span-zvqjv to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:53 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-zvqjv job-controller
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:54 +0000 UTC Normal Pod check-span-8w67g AddedInterface Add eth0 [10.131.0.66/23] from ovn-kubernetes
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:54 +0000 UTC Normal Pod check-span-8w67g.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:54 +0000 UTC Normal Pod check-span-8w67g.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:54 +0000 UTC Normal Pod check-span-8w67g.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:54 +0000 UTC Normal Pod report-span-zvqjv AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:54 +0000 UTC Normal Pod report-span-zvqjv.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:54 +0000 UTC Normal Pod report-span-zvqjv.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:22:54 +0000 UTC Normal Pod report-span-zvqjv.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:23:05 | examples-collector-with-priority-class | 2023-12-04 07:23:04 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:23:05 | examples-collector-with-priority-class | Deleting namespace: kuttl-test-solid-grubworm
=== CONT kuttl/harness/examples-all-in-one-with-options
logger.go:42: 07:23:17 | examples-all-in-one-with-options | Creating namespace: kuttl-test-dynamic-rhino
logger.go:42: 07:23:17 | examples-all-in-one-with-options/0-install | starting test step 0-install
logger.go:42: 07:23:17 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-dynamic-rhino/my-jaeger created
logger.go:42: 07:23:23 | examples-all-in-one-with-options/0-install | test step completed 0-install
logger.go:42: 07:23:23 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 07:23:23 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 07:23:24 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 07:23:31 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:23:31 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:23:32 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created
logger.go:42: 07:23:32 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created
logger.go:42: 07:23:43 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 07:23:43 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-dynamic-rhino:
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:21 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv Binding Scheduled Successfully assigned kuttl-test-dynamic-rhino/my-jaeger-67b6f4994-b4pgv to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:21 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv AddedInterface Add eth0 [10.128.2.81/23] from ovn-kubernetes
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:21 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:21 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:21 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:21 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:21 +0000 UTC Normal ReplicaSet.apps my-jaeger-67b6f4994 SuccessfulCreate Created pod: my-jaeger-67b6f4994-b4pgv replicaset-controller
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:21 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-67b6f4994 to 1 deployment-controller
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:22 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:22 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:28 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:28 +0000 UTC Normal Pod my-jaeger-67b6f4994-b4pgv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:28 +0000 UTC Normal ReplicaSet.apps my-jaeger-67b6f4994 SuccessfulDelete Deleted pod: my-jaeger-67b6f4994-b4pgv replicaset-controller
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:28 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-67b6f4994 to 0 from 1 deployment-controller
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal Pod my-jaeger-75d457859-qdcgx Binding Scheduled Successfully assigned kuttl-test-dynamic-rhino/my-jaeger-75d457859-qdcgx to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal Pod my-jaeger-75d457859-qdcgx AddedInterface Add eth0 [10.128.2.82/23] from ovn-kubernetes
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal Pod my-jaeger-75d457859-qdcgx.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal Pod my-jaeger-75d457859-qdcgx.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal Pod my-jaeger-75d457859-qdcgx.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal Pod my-jaeger-75d457859-qdcgx.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal Pod my-jaeger-75d457859-qdcgx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal Pod my-jaeger-75d457859-qdcgx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal ReplicaSet.apps my-jaeger-75d457859 SuccessfulCreate Created pod: my-jaeger-75d457859-qdcgx replicaset-controller
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:29 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-75d457859 to 1 deployment-controller
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod check-span-ztq4g Binding Scheduled Successfully assigned kuttl-test-dynamic-rhino/check-span-ztq4g to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod check-span-ztq4g AddedInterface Add eth0 [10.131.0.67/23] from ovn-kubernetes
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod check-span-ztq4g.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod check-span-ztq4g.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod check-span-ztq4g.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-ztq4g job-controller
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod report-span-rzs4m Binding Scheduled Successfully assigned kuttl-test-dynamic-rhino/report-span-rzs4m to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod report-span-rzs4m AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod report-span-rzs4m.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod report-span-rzs4m.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Pod report-span-rzs4m.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:32 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-rzs4m job-controller
logger.go:42: 07:23:43 | examples-all-in-one-with-options | 2023-12-04 07:23:42 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:23:43 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-dynamic-rhino
=== CONT kuttl/harness/examples-auto-provision-kafka
logger.go:42: 07:23:56 | examples-auto-provision-kafka | Creating namespace: kuttl-test-sincere-bedbug
logger.go:42: 07:23:56 | examples-auto-provision-kafka/2-install | starting test step 2-install
logger.go:42: 07:23:56 | examples-auto-provision-kafka/2-install | Jaeger:kuttl-test-sincere-bedbug/auto-provision-kafka created
logger.go:42: 07:25:00 | examples-auto-provision-kafka/2-install | test step completed 2-install
logger.go:42: 07:25:00 | examples-auto-provision-kafka/3- | starting test step 3-
logger.go:42: 07:25:31 | examples-auto-provision-kafka/3- | test step completed 3-
logger.go:42: 07:25:31 | examples-auto-provision-kafka/4- | starting test step 4-
logger.go:42: 07:25:52 | examples-auto-provision-kafka/4- | test step completed 4-
logger.go:42: 07:25:52 | examples-auto-provision-kafka/5- | starting test step 5-
logger.go:42: 07:25:57 | examples-auto-provision-kafka/5- | test step completed 5-
logger.go:42: 07:25:57 | examples-auto-provision-kafka/6-smoke-test | starting test step 6-smoke-test
logger.go:42: 07:25:57 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE auto-provision-kafka /dev/null]
logger.go:42: 07:25:58 | examples-auto-provision-kafka/6-smoke-test | Warning: resource jaegers/auto-provision-kafka is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
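The 6-smoke-test step that follows uses the same render-then-apply pattern as every smoke test in this run: gomplate fills tests/templates/smoke-test.yaml.template with the collector and query endpoints, then kubectl apply creates a report-span Job and a check-span Job. A condensed sketch; the endpoint values are copied from the log entry below, ASSERT_IMG (the test-asserts image) is omitted for brevity, and the description of what the two jobs do is inferred from their names and the assert helpers set up earlier:

    # Render the smoke-test Job manifests from the template...
    JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 \
    MOUNT_SECRET=e2e-test \
      /tmp/jaeger-tests/bin/gomplate \
        -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template \
        -o smoke-test-job.yaml
    # ...then create the two jobs: report-span sends a span through the
    # collector; check-span polls the query API until that span is visible.
    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"

The step passes once check-span succeeds, reported below at 07:26:16 as "test step completed 6-smoke-test".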
logger.go:42: 07:26:04 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://auto-provision-kafka-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://auto-provision-kafka-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 07:26:05 | examples-auto-provision-kafka/6-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 07:26:05 | examples-auto-provision-kafka/6-smoke-test | job.batch/report-span created
logger.go:42: 07:26:05 | examples-auto-provision-kafka/6-smoke-test | job.batch/check-span created
logger.go:42: 07:26:16 | examples-auto-provision-kafka/6-smoke-test | test step completed 6-smoke-test
logger.go:42: 07:26:16 | examples-auto-provision-kafka | examples-auto-provision-kafka events from ns kuttl-test-sincere-bedbug:
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:03 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-76d989b54d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x replicaset-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:03 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x to ip-10-0-97-5.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:03 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:03 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-76d989b54d to 1 deployment-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:19 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestsincerebedbugautoprovisionka-1-tcb7x.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:30 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:31 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-zookeeper NoPods No matching pods found controllermanager
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:31 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:31 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-sincere-bedbug/data-auto-provision-kafka-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:35 +0000 UTC Normal PersistentVolumeClaim data-auto-provision-kafka-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-37ed88e9-e170-4a09-8245-d98901b6c373 ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:36 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/auto-provision-kafka-zookeeper-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:38 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-37ed88e9-e170-4a09-8245-d98901b6c373" attachdetach-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:40 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0 AddedInterface Add eth0 [10.128.2.83/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:40 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:40 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:24:40 +0000 UTC Normal Pod auto-provision-kafka-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:01 +0000 UTC Normal PodDisruptionBudget.policy auto-provision-kafka-kafka NoPods No matching pods found controllermanager
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:01 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:01 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-sincere-bedbug/data-0-auto-provision-kafka-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:01 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:06 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/auto-provision-kafka-kafka-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:06 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provision-kafka-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-7e502274-c9bf-4f53-b26c-342981df0d72 ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:08 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-7e502274-c9bf-4f53-b26c-342981df0d72" attachdetach-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:10 +0000 UTC Normal Pod auto-provision-kafka-kafka-0 AddedInterface Add eth0 [10.128.2.84/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:10 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:10 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Created Created container kafka kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:10 +0000 UTC Normal Pod auto-provision-kafka-kafka-0.spec.containers{kafka} Started Started container kafka kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:31 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/auto-provision-kafka-entity-operator-68f7cd4758-dxxmc to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:31 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-entity-operator-68f7cd4758 SuccessfulCreate Created pod: auto-provision-kafka-entity-operator-68f7cd4758-dxxmc replicaset-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:31 +0000 UTC Normal Deployment.apps auto-provision-kafka-entity-operator ScalingReplicaSet Scaled up replica set auto-provision-kafka-entity-operator-68f7cd4758 to 1 deployment-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc AddedInterface Add eth0 [10.128.2.85/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc.spec.containers{topic-operator} Created Created container topic-operator kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc.spec.containers{topic-operator} Started Started container topic-operator kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc.spec.containers{user-operator} Created Created container user-operator kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc.spec.containers{user-operator} Started Started container user-operator kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:32 +0000 UTC Normal Pod auto-provision-kafka-entity-operator-68f7cd4758-dxxmc.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-collector-95b6ffdb9-6h76p Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/auto-provision-kafka-collector-95b6ffdb9-6h76p to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-collector-95b6ffdb9-6h76p AddedInterface Add eth0 [10.131.0.68/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-collector-95b6ffdb9-6h76p.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-collector-95b6ffdb9-6h76p.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-collector-95b6ffdb9-6h76p.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-collector-95b6ffdb9 SuccessfulCreate Created pod: auto-provision-kafka-collector-95b6ffdb9-6h76p replicaset-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Deployment.apps auto-provision-kafka-collector ScalingReplicaSet Scaled up replica set auto-provision-kafka-collector-95b6ffdb9 to 1 deployment-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-ingester-79d9797485-jpstn Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/auto-provision-kafka-ingester-79d9797485-jpstn to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-ingester-79d9797485-jpstn AddedInterface Add eth0 [10.131.0.70/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-ingester-79d9797485-jpstn.spec.containers{jaeger-ingester} Pulled Container image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-ingester-79d9797485-jpstn.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-ingester-79d9797485-jpstn.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-ingester-79d9797485 SuccessfulCreate Created pod: auto-provision-kafka-ingester-79d9797485-jpstn replicaset-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Deployment.apps auto-provision-kafka-ingester ScalingReplicaSet Scaled up replica set auto-provision-kafka-ingester-79d9797485 to 1 deployment-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/auto-provision-kafka-query-794b57bf87-wxdxd to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd AddedInterface Add eth0 [10.131.0.69/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-794b57bf87 SuccessfulCreate Created pod: auto-provision-kafka-query-794b57bf87-wxdxd replicaset-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:25:55 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-794b57bf87 to 1 deployment-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:00 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:00 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:00 +0000 UTC Normal Pod auto-provision-kafka-query-794b57bf87-wxdxd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:00 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-794b57bf87 SuccessfulDelete Deleted pod: auto-provision-kafka-query-794b57bf87-wxdxd replicaset-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:00 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled down replica set auto-provision-kafka-query-794b57bf87 to 0 from 1 deployment-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:01 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/auto-provision-kafka-query-747c69c7fb-579tm to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:01 +0000 UTC Normal ReplicaSet.apps auto-provision-kafka-query-747c69c7fb SuccessfulCreate Created pod: auto-provision-kafka-query-747c69c7fb-579tm replicaset-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:01 +0000 UTC Normal Deployment.apps auto-provision-kafka-query ScalingReplicaSet Scaled up replica set auto-provision-kafka-query-747c69c7fb to 1 deployment-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm AddedInterface Add eth0 [10.131.0.71/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:02 +0000 UTC Normal Pod auto-provision-kafka-query-747c69c7fb-579tm.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:05 +0000 UTC Normal Pod check-span-q9nll Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/check-span-q9nll to ip-10-0-4-161.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:05 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-q9nll job-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:05 +0000 UTC Normal Pod report-span-chhkm Binding Scheduled Successfully assigned kuttl-test-sincere-bedbug/report-span-chhkm to ip-10-0-72-35.us-east-2.compute.internal default-scheduler
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:05 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-chhkm job-controller
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:06 +0000 UTC Normal Pod check-span-q9nll AddedInterface Add eth0 [10.131.0.72/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:06 +0000 UTC Normal Pod check-span-q9nll.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:06 +0000 UTC Normal Pod check-span-q9nll.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:06 +0000 UTC Normal Pod check-span-q9nll.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:06 +0000 UTC Normal Pod report-span-chhkm AddedInterface Add eth0 [10.128.2.86/23] from ovn-kubernetes
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:06 +0000 UTC Normal Pod report-span-chhkm.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:06 +0000 UTC Normal Pod report-span-chhkm.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:06 +0000 UTC Normal Pod report-span-chhkm.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling
auto-provision-kafka-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:12 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provision-kafka-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:26:16 | examples-auto-provision-kafka | 2023-12-04 07:26:16 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:26:16 | examples-auto-provision-kafka | Deleting namespace: kuttl-test-sincere-bedbug === CONT kuttl/harness/examples-agent-with-priority-class logger.go:42: 07:26:59 | examples-agent-with-priority-class | Creating namespace: kuttl-test-allowing-seal logger.go:42: 07:26:59 | examples-agent-with-priority-class/0-install | starting test step 0-install logger.go:42: 07:26:59 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 07:26:59 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-allowing-seal/jaeger-agent-daemonset created logger.go:42: 07:26:59 | examples-agent-with-priority-class/0-install | test step completed 0-install logger.go:42: 07:26:59 | examples-agent-with-priority-class/1-install | starting test step 1-install logger.go:42: 07:26:59 | examples-agent-with-priority-class/1-install | PriorityClass:/high-priority created logger.go:42: 07:26:59 | examples-agent-with-priority-class/1-install | Jaeger:kuttl-test-allowing-seal/agent-as-daemonset created logger.go:42: 07:27:05 | examples-agent-with-priority-class/1-install | test step completed 1-install logger.go:42: 07:27:05 | examples-agent-with-priority-class/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:27:05 | 
examples-agent-with-priority-class/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 07:27:06 | examples-agent-with-priority-class/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:27:13 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:27:13 | examples-agent-with-priority-class/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:27:14 | examples-agent-with-priority-class/2-smoke-test | job.batch/report-span created logger.go:42: 07:27:14 | examples-agent-with-priority-class/2-smoke-test | job.batch/check-span created logger.go:42: 07:27:26 | examples-agent-with-priority-class/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:27:26 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns kuttl-test-allowing-seal: logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:02 +0000 UTC Normal Pod agent-as-daemonset-869ddd9bb5-g7ftk Binding Scheduled Successfully assigned kuttl-test-allowing-seal/agent-as-daemonset-869ddd9bb5-g7ftk to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:02 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-869ddd9bb5 SuccessfulCreate Created pod: agent-as-daemonset-869ddd9bb5-g7ftk replicaset-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:02 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-869ddd9bb5 to 1 deployment-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:03 +0000 UTC Normal Pod agent-as-daemonset-869ddd9bb5-g7ftk AddedInterface Add eth0 [10.128.2.87/23] from ovn-kubernetes logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:03 +0000 UTC Normal Pod agent-as-daemonset-869ddd9bb5-g7ftk.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:03 +0000 UTC Normal Pod agent-as-daemonset-869ddd9bb5-g7ftk.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:03 +0000 UTC Normal Pod agent-as-daemonset-869ddd9bb5-g7ftk.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:03 +0000 UTC Normal Pod 
agent-as-daemonset-869ddd9bb5-g7ftk.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:03 +0000 UTC Normal Pod agent-as-daemonset-869ddd9bb5-g7ftk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:03 +0000 UTC Normal Pod agent-as-daemonset-869ddd9bb5-g7ftk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:05 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:09 +0000 UTC Normal Pod agent-as-daemonset-869ddd9bb5-g7ftk.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:09 +0000 UTC Normal Pod agent-as-daemonset-869ddd9bb5-g7ftk.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:09 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-869ddd9bb5 SuccessfulDelete Deleted pod: agent-as-daemonset-869ddd9bb5-g7ftk replicaset-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:09 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-869ddd9bb5 to 0 from 1 deployment-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | 
2023-12-04 07:27:10 +0000 UTC Normal Pod agent-as-daemonset-7686cb8595-lwvjn Binding Scheduled Successfully assigned kuttl-test-allowing-seal/agent-as-daemonset-7686cb8595-lwvjn to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:10 +0000 UTC Normal Pod agent-as-daemonset-7686cb8595-lwvjn AddedInterface Add eth0 [10.128.2.88/23] from ovn-kubernetes logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:10 +0000 UTC Normal Pod agent-as-daemonset-7686cb8595-lwvjn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:10 +0000 UTC Normal Pod agent-as-daemonset-7686cb8595-lwvjn.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:10 +0000 UTC Normal Pod agent-as-daemonset-7686cb8595-lwvjn.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:10 +0000 UTC Normal Pod agent-as-daemonset-7686cb8595-lwvjn.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:10 +0000 UTC Normal Pod agent-as-daemonset-7686cb8595-lwvjn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:10 +0000 UTC Normal Pod agent-as-daemonset-7686cb8595-lwvjn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:10 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-7686cb8595 SuccessfulCreate Created pod: agent-as-daemonset-7686cb8595-lwvjn replicaset-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:10 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-7686cb8595 to 1 deployment-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod check-span-qb58v Binding Scheduled Successfully assigned kuttl-test-allowing-seal/check-span-qb58v to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod check-span-qb58v AddedInterface Add eth0 [10.131.0.73/23] from ovn-kubernetes logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod check-span-qb58v.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod check-span-qb58v.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod 
check-span-qb58v.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-qb58v job-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod report-span-sl5dc Binding Scheduled Successfully assigned kuttl-test-allowing-seal/report-span-sl5dc to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod report-span-sl5dc AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod report-span-sl5dc.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod report-span-sl5dc.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Pod report-span-sl5dc.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:14 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-sl5dc job-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | 2023-12-04 07:27:25 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:27:26 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-allowing-seal === CONT kuttl/harness/examples-agent-as-daemonset logger.go:42: 07:27:39 | examples-agent-as-daemonset | Creating namespace: kuttl-test-social-ray logger.go:42: 07:27:39 | examples-agent-as-daemonset/0-install | starting test step 0-install logger.go:42: 07:27:39 | examples-agent-as-daemonset/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 07:27:39 | examples-agent-as-daemonset/0-install | ServiceAccount:kuttl-test-social-ray/jaeger-agent-daemonset created logger.go:42: 07:27:39 | examples-agent-as-daemonset/0-install | test step completed 0-install logger.go:42: 07:27:39 | examples-agent-as-daemonset/1-install | starting test step 1-install logger.go:42: 07:27:39 | examples-agent-as-daemonset/1-install | Jaeger:kuttl-test-social-ray/agent-as-daemonset created logger.go:42: 07:27:46 | examples-agent-as-daemonset/1-install | test step completed 1-install logger.go:42: 07:27:46 | examples-agent-as-daemonset/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:27:46 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 07:27:48 | examples-agent-as-daemonset/2-smoke-test | Warning: resource jaegers/agent-as-daemonset is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
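Two recurring messages in these agent tests deserve a note. The kubectl warning above is harmless: the smoke test runs kubectl apply against the jaegers/agent-as-daemonset resource that kuttl created imperatively, so the kubectl.kubernetes.io/last-applied-configuration annotation is absent the first time and is patched on automatically. The DaemonSet FailedCreate event, by contrast, is a real SCC admission failure: the agent DaemonSet requests hostPorts 5775, 5778, 6831, 6832 and 14271, and at the moment the daemonset-controller first tries to create pods, the jaeger-agent-daemonset ServiceAccount is not (or not yet) authorized to use the freshly created daemonset-with-hostport SecurityContextConstraints, so every provider, including that one, is rejected. On OpenShift the usual way to make such a grant explicit is oc adm policy; a minimal sketch, using this test's namespace and otherwise illustrative:

    # Let the DaemonSet's ServiceAccount use the custom SCC;
    # -z names a ServiceAccount in the given namespace.
    oc adm policy add-scc-to-user daemonset-with-hostport \
        -z jaeger-agent-daemonset -n kuttl-test-allowing-seal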
logger.go:42: 07:27:54 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 07:27:54 | examples-agent-as-daemonset/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:27:55 | examples-agent-as-daemonset/2-smoke-test | job.batch/report-span created logger.go:42: 07:27:55 | examples-agent-as-daemonset/2-smoke-test | job.batch/check-span created logger.go:42: 07:28:06 | examples-agent-as-daemonset/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:28:06 | examples-agent-as-daemonset | examples-agent-as-daemonset events from ns kuttl-test-social-ray: logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:42 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n Binding Scheduled Successfully assigned kuttl-test-social-ray/agent-as-daemonset-c4ff6dbb7-8jz2n to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:42 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-c4ff6dbb7 SuccessfulCreate Created pod: agent-as-daemonset-c4ff6dbb7-8jz2n replicaset-controller logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:42 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-c4ff6dbb7 to 1 deployment-controller logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:43 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, 
provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:43 +0000 UTC Warning Pod agent-as-daemonset-c4ff6dbb7-8jz2n FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-ui-oauth-proxy-tls" : secret "agent-as-daemonset-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:43 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n AddedInterface Add eth0 [10.128.2.89/23] from ovn-kubernetes logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:44 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:44 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:44 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:44 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:44 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:44 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:50 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:50 +0000 UTC Normal Pod agent-as-daemonset-c4ff6dbb7-8jz2n.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:50 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-c4ff6dbb7 SuccessfulDelete Deleted pod: agent-as-daemonset-c4ff6dbb7-8jz2n replicaset-controller logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:50 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled down replica set agent-as-daemonset-c4ff6dbb7 to 0 from 1 deployment-controller logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC Normal Pod agent-as-daemonset-d5c4bb864-hxbmg Binding Scheduled Successfully assigned kuttl-test-social-ray/agent-as-daemonset-d5c4bb864-hxbmg to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC Normal Pod agent-as-daemonset-d5c4bb864-hxbmg AddedInterface Add eth0 [10.128.2.90/23] from ovn-kubernetes logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC 
Normal Pod agent-as-daemonset-d5c4bb864-hxbmg.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC Normal Pod agent-as-daemonset-d5c4bb864-hxbmg.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC Normal Pod agent-as-daemonset-d5c4bb864-hxbmg.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC Normal Pod agent-as-daemonset-d5c4bb864-hxbmg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC Normal Pod agent-as-daemonset-d5c4bb864-hxbmg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC Normal Pod agent-as-daemonset-d5c4bb864-hxbmg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-d5c4bb864 SuccessfulCreate Created pod: agent-as-daemonset-d5c4bb864-hxbmg replicaset-controller logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:51 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-d5c4bb864 to 1 deployment-controller logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod check-span-g4h77 Binding Scheduled Successfully assigned kuttl-test-social-ray/check-span-g4h77 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod check-span-g4h77 AddedInterface Add eth0 [10.131.0.74/23] from ovn-kubernetes logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod check-span-g4h77.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod check-span-g4h77.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod check-span-g4h77.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-g4h77 job-controller logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod report-span-q28m2 Binding Scheduled Successfully assigned kuttl-test-social-ray/report-span-q28m2 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod report-span-q28m2 
AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes
logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod report-span-q28m2.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet
logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod report-span-q28m2.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Pod report-span-q28m2.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:27:55 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-q28m2 job-controller
logger.go:42: 07:28:06 | examples-agent-as-daemonset | 2023-12-04 07:28:06 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 07:28:06 | examples-agent-as-daemonset | Deleting namespace: kuttl-test-social-ray
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1493.43s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.12s)
        --- PASS: kuttl/harness/examples-service-types (53.63s)
        --- PASS: kuttl/harness/examples-with-sampling (58.86s)
        --- PASS: kuttl/harness/examples-with-cassandra (68.92s)
        --- FAIL: kuttl/harness/examples-with-badger-and-volume (629.32s)
        --- PASS: kuttl/harness/examples-with-badger (39.07s)
        --- PASS: kuttl/harness/examples-simplest (41.22s)
        --- PASS: kuttl/harness/examples-simple-prod-with-volumes (71.70s)
        --- PASS: kuttl/harness/examples-simple-prod (63.80s)
        --- PASS: kuttl/harness/examples-business-application-injected-sidecar (39.13s)
        --- PASS: kuttl/harness/examples-openshift-with-htpasswd (23.88s)
        --- PASS: kuttl/harness/examples-openshift-agent-as-daemonset (59.20s)
        --- PASS: kuttl/harness/examples-collector-with-priority-class (37.78s)
        --- PASS: kuttl/harness/examples-all-in-one-with-options (38.56s)
        --- PASS: kuttl/harness/examples-auto-provision-kafka (182.76s)
        --- PASS: kuttl/harness/examples-agent-with-priority-class (40.15s)
        --- PASS: kuttl/harness/examples-agent-as-daemonset (39.17s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml
time="2023-12-04T07:28:18Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-04T07:28:18Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-04T07:28:18Z" level=debug msg="normalizing test case names"
time="2023-12-04T07:28:18Z" level=debug msg="examples/artifacts -> examples_artifacts"
time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types"
time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling"
time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra"
time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-with-badger-and-volume -> examples_examples_with_badger_and_volume"
time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger"
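The summary pins this suite's only failure on examples-with-badger-and-volume, and its 629.32s duration sits just past the 600-second per-step timeout these kuttl harness runs use (harness.go:73), which suggests a step timed out rather than asserted false. The junitcli call above converts ./artifacts/kuttl-report.xml into the final report; the failure detail can be read straight out of that XML, for example (path taken from the junitcli invocation, grep flags standard):

    # Show the context around the failing test case in the kuttl report.
    grep -B 2 -A 4 'examples-with-badger-and-volume' ./artifacts/kuttl-report.xml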
time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-openshift-agent-as-daemonset -> examples_examples_openshift_agent_as_daemonset" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-collector-with-priority-class -> examples_examples_collector_with_priority_class" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-auto-provision-kafka -> examples_examples_auto_provision_kafka" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class" time="2023-12-04T07:28:18Z" level=debug msg="examples/examples-agent-as-daemonset -> examples_examples_agent_as_daemonset" +---------------------------------------------------------+--------+ | NAME | RESULT | +---------------------------------------------------------+--------+ | examples_artifacts | passed | | examples_examples_service_types | passed | | examples_examples_with_sampling | passed | | examples_examples_with_cassandra | passed | | examples_examples_with_badger_and_volume | failed | | examples_examples_with_badger | passed | | examples_examples_simplest | passed | | examples_examples_simple_prod_with_volumes | passed | | examples_examples_simple_prod | passed | | examples_examples_business_application_injected_sidecar | passed | | examples_examples_openshift_with_htpasswd | passed | | examples_examples_openshift_agent_as_daemonset | passed | | examples_examples_collector_with_priority_class | passed | | examples_examples_all_in_one_with_options | passed | | examples_examples_auto_provision_kafka | passed | | examples_examples_agent_with_priority_class | passed | | examples_examples_agent_as_daemonset | passed | +---------------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true + '[' 3 -ne 3 ']' + test_suite_name=generate + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. 
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/generate.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-generate make[2]: Entering directory '/tmp/jaeger-tests' test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.27.0/operator-sdk_`go env GOOS`_`go env GOARCH` ./hack/install/install-golangci-lint.sh Installing golangci-lint golangci-lint 1.53.2 is installed already ./hack/install/install-goimports.sh Installing goimports Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12 >>>> Formatting code... ./.ci/format.sh >>>> Building... ./hack/install/install-dependencies.sh Installing go dependencies Try 0... go mod download GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.51.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2023-12-04T07:28:20Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.51.0"" -o "bin/jaeger-operator" main.go JAEGER_VERSION="1.51.0" ./tests/e2e/generate/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 50m Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 50m Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/generate ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 2 -ne 2 ']' + test_name=generate + message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/generate/_build + '[' _build '!=' _build ']' + rm -rf generate + warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m' WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running generate E2E tests' Running generate E2E tests + cd tests/e2e/generate/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3107041029 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 1 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT kuttl/harness/artifacts
logger.go:42: 07:28:35 | artifacts | Creating namespace: kuttl-test-tolerant-hermit
logger.go:42: 07:28:35 | artifacts | artifacts events from ns kuttl-test-tolerant-hermit:
logger.go:42: 07:28:35 | artifacts | Deleting namespace: kuttl-test-tolerant-hermit
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (6.49s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.32s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml
time="2023-12-04T07:28:42Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-04T07:28:42Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-04T07:28:42Z" level=debug msg="normalizing test case names"
time="2023-12-04T07:28:42Z" level=debug msg="generate/artifacts -> generate_artifacts"
+--------------------+--------+
|        NAME        | RESULT |
+--------------------+--------+
| generate_artifacts | passed |
+--------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=miscellaneous
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/miscellaneous.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-miscellaneous
make[2]: Entering directory '/tmp/jaeger-tests'
SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 50m Cluster version is 4.15.0-0.nightly-2023-12-02-123536'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 50m Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/miscellaneous/render.sh ++ export SUITE_DIR=./tests/e2e/miscellaneous ++ SUITE_DIR=./tests/e2e/miscellaneous ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test cassandra-spark 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=cassandra-spark + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf cassandra-spark + warning 'cassandra-spark: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: cassandra-spark: Test not supported in OpenShift\e[0m' WAR: cassandra-spark: Test not supported in OpenShift + start_test collector-autoscale + '[' 1 -ne 1 ']' + test_name=collector-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m' Rendering files for test collector-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p collector-autoscale + cd collector-autoscale + jaeger_name=simple-prod + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + ELASTICSEARCH_NODECOUNT=1 + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml + kubectl api-versions + grep autoscaling/v2beta2 -q + rm ./04-assert.yaml + generate_otlp_e2e_tests http + test_protocol=http + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m' Rendering files for test collector-otlp-allinone-http + echo 
=========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale + '[' collector-autoscale '!=' _build ']' + cd .. + mkdir -p collector-otlp-allinone-http + cd collector-otlp-allinone-http + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger http true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-http + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-http + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-http' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m' Rendering files for test collector-otlp-production-http + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http + '[' collector-otlp-allinone-http '!=' _build ']' + cd .. 
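For orientation: render_otlp_smoke_test emits a kuttl step that creates two Jobs, report-span and check-span (both appear later in this log when the step runs). The actual openshift/otlp-smoke-test.yaml.template is not included here, so the following is only a minimal sketch of the report-span half, with the manifest shape assumed from the environment variables in the trace above:

# Hypothetical reconstruction, not the real template output.
cat > 01-smoke-test.yaml <<'EOF'
apiVersion: batch/v1
kind: Job
metadata:
  name: report-span            # Job name confirmed later in this log
spec:
  template:
    spec:
      restartPolicy: Never
      containers:
      - name: report-span
        image: $ASSERT_IMG     # placeholder; gomplate substitutes the real image
        env:
        - name: OTEL_EXPORTER_OTLP_ENDPOINT
          value: http://my-jaeger-collector-headless:4318
        - name: REPORTING_PROTOCOL
          value: http
EOF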
+ mkdir -p collector-otlp-production-http + cd collector-otlp-production-http + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger http true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=http + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' http = grpc ']' + reporting_port=:4318 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=http + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + generate_otlp_e2e_tests grpc + test_protocol=grpc + is_secured=false + '[' true = true ']' + is_secured=true + start_test collector-otlp-allinone-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-allinone-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m' Rendering files for test collector-otlp-allinone-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http + '[' collector-otlp-production-http '!=' _build ']' + cd .. 
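Because is_secured=true on OpenShift, the smoke test reaches the query service through the OAuth proxy, which is why JAEGER_QUERY_ENDPOINT above is https://my-jaeger-query:443 rather than the plain query port. The suite's get-token.sh is not shown in this log; a generic hand-run probe of such an endpoint, assuming a service-account token, might look like:

# Sketch only; 'kubectl create token' needs Kubernetes 1.24+ (this OCP 4.15 cluster qualifies).
TOKEN=$(kubectl -n "$NAMESPACE" create token e2e-test)
curl -ks -H "Authorization: Bearer $TOKEN" https://my-jaeger-query:443/api/services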
+ mkdir -p collector-otlp-allinone-grpc + cd collector-otlp-allinone-grpc + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 01 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + start_test collector-otlp-production-grpc + '[' 1 -ne 1 ']' + test_name=collector-otlp-production-grpc + echo =========================================================================== =========================================================================== + info 'Rendering files for test collector-otlp-production-grpc' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m' Rendering files for test collector-otlp-production-grpc + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc + '[' collector-otlp-allinone-grpc '!=' _build ']' + cd .. 
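The grpc variants differ from the http ones only in the OTLP ingest port: 4318 for OTLP/HTTP, 4317 for OTLP/gRPC. The selection logic in render_otlp_smoke_test, reconstructed directly from the '+' trace above:

# Reconstructed from the trace; variable names as they appear there.
if [ "$reporting_protocol" = grpc ]; then
  reporting_port=:4317
else
  reporting_port=:4318
fi
export OTEL_EXPORTER_OTLP_ENDPOINT="http://${JAEGER_NAME}-collector-headless${reporting_port}"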
+ mkdir -p collector-otlp-production-grpc + cd collector-otlp-production-grpc + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_otlp_smoke_test my-jaeger grpc true 02 + '[' 4 -ne 4 ']' + jaeger=my-jaeger + reporting_protocol=grpc + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template + '[' grpc = grpc ']' + reporting_port=:4317 + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + REPORTING_PROTOCOL=grpc + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset OTEL_EXPORTER_OTLP_ENDPOINT + '[' true = true ']' + skip_test istio 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=istio + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc + '[' collector-otlp-production-grpc '!=' _build ']' + cd .. 
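The skip_test helper traced around this point (here for istio, earlier for cassandra-spark) is not printed as source, but its behavior is fully visible: validate the two arguments, step out of any test directory back into _build, delete the rendered test, and warn. A reconstruction under those assumptions:

# Sketch of skip_test as inferred from the trace; not the original source.
skip_test() {
  [ $# -ne 2 ] && { echo "skip_test needs <test_name> <message>"; return 1; }
  test_name=$1
  message=$2
  [ "$(basename "$(pwd)")" != _build ] && cd ..   # tests render under _build
  rm -rf "$test_name"                             # drop the rendered test dir
  warning "$test_name: $message"                  # warning() prints the yellow WAR: line
}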
+ rm -rf istio + warning 'istio: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m' WAR: istio: Test not supported in OpenShift + '[' true = true ']' + skip_test outside-cluster 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=outside-cluster + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + rm -rf outside-cluster + warning 'outside-cluster: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m' WAR: outside-cluster: Test not supported in OpenShift + start_test set-custom-img + '[' 1 -ne 1 ']' + test_name=set-custom-img + echo =========================================================================== =========================================================================== + info 'Rendering files for test set-custom-img' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test set-custom-img\e[0m' Rendering files for test set-custom-img + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build + '[' _build '!=' _build ']' + mkdir -p set-custom-img + cd set-custom-img + jaeger_name=my-jaeger + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml + '[' true = true ']' + skip_test non-cluster-wide 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=non-cluster-wide + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img + '[' set-custom-img '!=' _build ']' + cd .. + rm -rf non-cluster-wide + warning 'non-cluster-wide: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m' WAR: non-cluster-wide: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running miscellaneous E2E tests' Running miscellaneous E2E tests + cd tests/e2e/miscellaneous/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3107041029 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. 
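Rendering is finished at this point and the suite switches to execution. To reproduce this step by hand outside CI, the invocation is the one shown in the log:

cd /tmp/jaeger-tests/tests/e2e/miscellaneous/_build
KUBECONFIG=/tmp/kubeconfig-3107041029 /tmp/jaeger-tests/bin/kubectl-kuttl test --report xml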
harness.go:275: Successful connection to cluster at: https://api.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 7 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 07:28:52 | artifacts | Creating namespace: kuttl-test-more-viper logger.go:42: 07:28:52 | artifacts | artifacts events from ns kuttl-test-more-viper: logger.go:42: 07:28:52 | artifacts | Deleting namespace: kuttl-test-more-viper === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 07:28:58 | collector-otlp-production-grpc | Creating namespace: kuttl-test-exact-sponge logger.go:42: 07:28:58 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 07:28:59 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-exact-sponge/my-jaeger created logger.go:42: 07:29:34 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 07:29:34 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:29:34 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:29:35 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
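The last-applied-configuration warning above is benign: the Jaeger CR was created imperatively by the kuttl install step, and get-token.sh apparently follows up with kubectl apply, which patches the missing annotation automatically. Creating the resource with --save-config from the start would avoid the warning:

# Records the annotation that a later 'kubectl apply' expects.
kubectl create -f 01-install.yaml --save-config -n "$NAMESPACE"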
logger.go:42: 07:29:42 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:29:42 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:29:42 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 07:29:42 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 07:30:02 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:30:02 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-exact-sponge: logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:04 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4d SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4 replicaset-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:04 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4 Binding Scheduled Successfully assigned kuttl-test-exact-sponge/elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:04 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestexactspongemyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4d to 1 deployment-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4 AddedInterface Add eth0 [10.129.2.45/23] from ovn-kubernetes logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 07:30:02 | 
collector-otlp-production-grpc | 2023-12-04 07:29:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:05 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:15 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:20 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestexactspongemyjaeger-1-55bb6c5f4kscb4.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:31 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-dw2tc Binding Scheduled Successfully assigned kuttl-test-exact-sponge/my-jaeger-collector-5489f5bd9b-dw2tc to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:31 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-dw2tc replicaset-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:31 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:31 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-744c78487f SuccessfulCreate Created pod: my-jaeger-query-744c78487f-cmcbq replicaset-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:31 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-744c78487f to 1 deployment-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Warning Pod my-jaeger-collector-5489f5bd9b-dw2tc FailedMount MountVolume.SetUp failed for volume "my-jaeger-collector-tls-config-volume" : secret "my-jaeger-collector-headless-tls" not found kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-dw2tc AddedInterface Add eth0 [10.128.2.91/23] from ovn-kubernetes logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-dw2tc.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq Binding Scheduled Successfully assigned kuttl-test-exact-sponge/my-jaeger-query-744c78487f-cmcbq to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq AddedInterface Add eth0 [10.128.2.92/23] from ovn-kubernetes logger.go:42: 07:30:02 | 
collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:32 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:33 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-dw2tc.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:33 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-dw2tc.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:38 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:38 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:38 +0000 UTC Normal Pod my-jaeger-query-744c78487f-cmcbq.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:38 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-744c78487f SuccessfulDelete Deleted pod: my-jaeger-query-744c78487f-cmcbq replicaset-controller logger.go:42: 
07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:38 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-744c78487f to 0 from 1 deployment-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt Binding Scheduled Successfully assigned kuttl-test-exact-sponge/my-jaeger-query-f595db849-jpnjt to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt AddedInterface Add eth0 [10.128.2.93/23] from ovn-kubernetes logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Pod my-jaeger-query-f595db849-jpnjt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-f595db849 SuccessfulCreate Created pod: my-jaeger-query-f595db849-jpnjt replicaset-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:39 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-f595db849 to 1 deployment-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:42 +0000 UTC Normal Pod check-span-5sttv Binding Scheduled Successfully assigned 
kuttl-test-exact-sponge/check-span-5sttv to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:42 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-5sttv job-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:42 +0000 UTC Normal Pod report-span-5krww Binding Scheduled Successfully assigned kuttl-test-exact-sponge/report-span-5krww to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:42 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-5krww job-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:43 +0000 UTC Normal Pod check-span-5sttv AddedInterface Add eth0 [10.131.0.76/23] from ovn-kubernetes logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:43 +0000 UTC Normal Pod check-span-5sttv.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:43 +0000 UTC Normal Pod check-span-5sttv.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:43 +0000 UTC Normal Pod check-span-5sttv.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:43 +0000 UTC Normal Pod report-span-5krww AddedInterface Add eth0 [10.131.0.75/23] from ovn-kubernetes logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:43 +0000 UTC Normal Pod report-span-5krww.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:43 +0000 UTC Normal Pod report-span-5krww.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:43 +0000 UTC Normal Pod report-span-5krww.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:29:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned 
from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:30:02 | collector-otlp-production-grpc | 2023-12-04 07:30:01 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:30:02 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-exact-sponge === CONT kuttl/harness/set-custom-img logger.go:42: 07:30:15 | set-custom-img | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:30:15 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:30:15 | set-custom-img | Creating namespace: kuttl-test-top-elephant logger.go:42: 07:30:15 | set-custom-img/1-install | starting test step 1-install logger.go:42: 07:30:15 | set-custom-img/1-install | Jaeger:kuttl-test-top-elephant/my-jaeger created logger.go:42: 07:30:52 | set-custom-img/1-install | test step completed 1-install logger.go:42: 07:30:52 | set-custom-img/2-install | starting test step 2-install logger.go:42: 07:30:52 | set-custom-img/2-install | Jaeger:kuttl-test-top-elephant/my-jaeger updated logger.go:42: 07:30:52 | set-custom-img/2-install | test step completed 2-install logger.go:42: 07:30:52 | set-custom-img/3-check-image | starting test step 3-check-image logger.go:42: 07:30:52 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh] logger.go:42: 07:30:52 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856 logger.go:42: 07:30:57 | set-custom-img/3-check-image | Collector image asserted properly! logger.go:42: 07:30:58 | set-custom-img/3-check-image | test step completed 3-check-image logger.go:42: 07:30:58 | set-custom-img | set-custom-img events from ns kuttl-test-top-elephant: logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8db SuccessfulCreate Created pod: elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl replicaset-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl Binding Scheduled Successfully assigned kuttl-test-top-elephant/elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl.spec.containers{elasticsearch} Started Started container 
elasticsearch kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:22 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltesttopelephantmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8db to 1 deployment-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:32 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:37 +0000 UTC Warning Pod elasticsearch-cdm-kuttltesttopelephantmyjaeger-1-5f7f87c8dvhfdl.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-vtdjb Binding Scheduled Successfully assigned kuttl-test-top-elephant/my-jaeger-collector-5489f5bd9b-vtdjb to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-vtdjb AddedInterface Add eth0 [10.128.2.95/23] from ovn-kubernetes logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-vtdjb.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-vtdjb.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-vtdjb.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-vtdjb replicaset-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr Binding Scheduled Successfully assigned kuttl-test-top-elephant/my-jaeger-query-767455b774-fvptr to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:58 | set-custom-img | 
2023-12-04 07:30:49 +0000 UTC Warning Pod my-jaeger-query-767455b774-fvptr FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : secret "my-jaeger-ui-oauth-proxy-tls" not found kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-767455b774 SuccessfulCreate Created pod: my-jaeger-query-767455b774-fvptr replicaset-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:49 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-767455b774 to 1 deployment-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr AddedInterface Add eth0 [10.128.2.96/23] from ovn-kubernetes logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:50 +0000 UTC Normal Pod my-jaeger-query-767455b774-fvptr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:54 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-vtdjb.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:54 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulDelete Deleted pod: my-jaeger-collector-5489f5bd9b-vtdjb replicaset-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:54 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-5489f5bd9b to 0 from 1 
deployment-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:55 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-t4llz Binding Scheduled Successfully assigned kuttl-test-top-elephant/my-jaeger-collector-7fd96ccd44-t4llz to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:55 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-t4llz AddedInterface Add eth0 [10.131.0.77/23] from ovn-kubernetes logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:55 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-t4llz.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:55 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-7fd96ccd44 SuccessfulCreate Created pod: my-jaeger-collector-7fd96ccd44-t4llz replicaset-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:55 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-7fd96ccd44 to 1 deployment-controller logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:56 +0000 UTC Warning Pod my-jaeger-collector-7fd96ccd44-t4llz.spec.containers{jaeger-collector} Failed Failed to pull image "test": reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:56 +0000 UTC Warning Pod my-jaeger-collector-7fd96ccd44-t4llz.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:57 +0000 UTC Normal Pod my-jaeger-collector-7fd96ccd44-t4llz.spec.containers{jaeger-collector} BackOff Back-off pulling image "test" kubelet logger.go:42: 07:30:58 | set-custom-img | 2023-12-04 07:30:57 +0000 UTC Warning Pod my-jaeger-collector-7fd96ccd44-t4llz.spec.containers{jaeger-collector} Failed Error: ImagePullBackOff kubelet logger.go:42: 07:30:58 | set-custom-img | Deleting namespace: kuttl-test-top-elephant === CONT kuttl/harness/collector-otlp-production-http logger.go:42: 07:31:04 | collector-otlp-production-http | Creating namespace: kuttl-test-enough-spider logger.go:42: 07:31:04 | collector-otlp-production-http/1-install | starting test step 1-install logger.go:42: 07:31:04 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-enough-spider/my-jaeger created logger.go:42: 07:31:40 | collector-otlp-production-http/1-install | test step completed 1-install logger.go:42: 07:31:40 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test logger.go:42: 07:31:40 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:31:41 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
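check-collector-img.sh, invoked in the set-custom-img steps above, is not included in this log, but its two messages (a mismatch report, then "asserted properly") suggest it polls the collector Deployment until the image matches the custom value. A plausible sketch, with resource names taken from the trace:

# Sketch only; the real script may differ.
EXPECTED=test
until HAS=$(kubectl -n "$NAMESPACE" get deployment my-jaeger-collector -o jsonpath='{.spec.template.spec.containers[0].image}') && [ "$HAS" = "$EXPECTED" ]; do
  echo "Collector image mismatch. Expected: $EXPECTED. Has: $HAS"
  sleep 5
done
echo "Collector image asserted properly!"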
logger.go:42: 07:31:47 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:31:48 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:31:48 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created logger.go:42: 07:31:48 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created logger.go:42: 07:32:01 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test logger.go:42: 07:32:01 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-enough-spider: logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:10 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9b SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h replicaset-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:10 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h Binding Scheduled Successfully assigned kuttl-test-enough-spider/elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:10 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestenoughspidermyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9b to 1 deployment-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:11 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h AddedInterface Add eth0 [10.129.2.47/23] from ovn-kubernetes logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:11 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:11 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:21 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:26 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestenoughspidermyjaeger-1-c6cc5cb9qkh9h.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:37 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-5gfbt Binding Scheduled Successfully assigned kuttl-test-enough-spider/my-jaeger-collector-5489f5bd9b-5gfbt to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:37 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-5gfbt AddedInterface Add eth0 [10.128.2.97/23] from ovn-kubernetes logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:37 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5489f5bd9b SuccessfulCreate Created pod: my-jaeger-collector-5489f5bd9b-5gfbt replicaset-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:37 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5489f5bd9b to 1 deployment-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:37 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg Binding Scheduled Successfully assigned kuttl-test-enough-spider/my-jaeger-query-5c5fc9c598-nqzjg to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:37 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5c5fc9c598 SuccessfulCreate Created pod: my-jaeger-query-5c5fc9c598-nqzjg replicaset-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:37 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5c5fc9c598 to 1 deployment-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-5gfbt.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-5gfbt.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet 
logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-collector-5489f5bd9b-5gfbt.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg AddedInterface Add eth0 [10.128.2.98/23] from ovn-kubernetes logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:38 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:42 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:42 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:42 +0000 UTC Normal Pod my-jaeger-query-5c5fc9c598-nqzjg.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:42 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5c5fc9c598 SuccessfulDelete Deleted pod: my-jaeger-query-5c5fc9c598-nqzjg replicaset-controller logger.go:42: 
07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:42 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-5c5fc9c598 to 0 from 1 deployment-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:43 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp Binding Scheduled Successfully assigned kuttl-test-enough-spider/my-jaeger-query-79c764c464-qdszp to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:43 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp AddedInterface Add eth0 [10.128.2.99/23] from ovn-kubernetes logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-79c764c464 SuccessfulCreate Created pod: my-jaeger-query-79c764c464-qdszp replicaset-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:43 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-79c764c464 to 1 deployment-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:44 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:44 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:44 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:44 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:44 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:44 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:44 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:44 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:44 +0000 UTC Normal Pod my-jaeger-query-79c764c464-qdszp.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:48 +0000 UTC Normal Pod check-span-7mpvt Binding Scheduled 
Successfully assigned kuttl-test-enough-spider/check-span-7mpvt to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:48 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-7mpvt job-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:48 +0000 UTC Normal Pod report-span-78rzx Binding Scheduled Successfully assigned kuttl-test-enough-spider/report-span-78rzx to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:48 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-78rzx job-controller logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:49 +0000 UTC Normal Pod check-span-7mpvt AddedInterface Add eth0 [10.131.0.79/23] from ovn-kubernetes logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:49 +0000 UTC Normal Pod check-span-7mpvt.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:49 +0000 UTC Normal Pod check-span-7mpvt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:49 +0000 UTC Normal Pod check-span-7mpvt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:49 +0000 UTC Normal Pod report-span-78rzx AddedInterface Add eth0 [10.131.0.78/23] from ovn-kubernetes logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:49 +0000 UTC Normal Pod report-span-78rzx.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:49 +0000 UTC Normal Pod report-span-78rzx.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:49 +0000 UTC Normal Pod report-span-78rzx.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:52 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:52 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:31:52 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource 
cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:32:01 | collector-otlp-production-http | 2023-12-04 07:32:00 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:32:01 | collector-otlp-production-http | Deleting namespace: kuttl-test-enough-spider === CONT kuttl/harness/collector-otlp-allinone-grpc logger.go:42: 07:32:13 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-novel-panther logger.go:42: 07:32:13 | collector-otlp-allinone-grpc/0-install | starting test step 0-install logger.go:42: 07:32:13 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-novel-panther/my-jaeger created logger.go:42: 07:32:20 | collector-otlp-allinone-grpc/0-install | test step completed 0-install logger.go:42: 07:32:20 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:32:20 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:32:21 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:32:27 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:32:28 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:32:28 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created logger.go:42: 07:32:28 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created logger.go:42: 07:32:49 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-novel-panther: logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:17 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6 Binding Scheduled Successfully assigned kuttl-test-novel-panther/my-jaeger-65dd444864-cfvf6 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:17 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6 AddedInterface Add eth0 [10.128.2.100/23] from ovn-kubernetes logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:17 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:17 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6.spec.containers{jaeger} Created Created container 
jaeger kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:17 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:17 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:17 +0000 UTC Normal ReplicaSet.apps my-jaeger-65dd444864 SuccessfulCreate Created pod: my-jaeger-65dd444864-cfvf6 replicaset-controller logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:17 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-65dd444864 to 1 deployment-controller logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:18 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:18 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:22 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:22 +0000 UTC Normal Pod my-jaeger-65dd444864-cfvf6.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:22 +0000 UTC Normal ReplicaSet.apps my-jaeger-65dd444864 SuccessfulDelete Deleted pod: my-jaeger-65dd444864-cfvf6 replicaset-controller logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:22 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-65dd444864 to 0 from 1 deployment-controller logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:23 +0000 UTC Normal Pod my-jaeger-7d4b874fd-q8lmk Binding Scheduled Successfully assigned kuttl-test-novel-panther/my-jaeger-7d4b874fd-q8lmk to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:23 +0000 UTC Normal ReplicaSet.apps my-jaeger-7d4b874fd SuccessfulCreate Created pod: my-jaeger-7d4b874fd-q8lmk replicaset-controller logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:23 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7d4b874fd to 1 deployment-controller logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:24 +0000 UTC Normal Pod my-jaeger-7d4b874fd-q8lmk AddedInterface Add eth0 [10.128.2.101/23] from ovn-kubernetes logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:24 +0000 UTC Normal Pod my-jaeger-7d4b874fd-q8lmk.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:24 +0000 UTC Normal Pod my-jaeger-7d4b874fd-q8lmk.spec.containers{jaeger} Created Created container jaeger 
kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:24 +0000 UTC Normal Pod my-jaeger-7d4b874fd-q8lmk.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:24 +0000 UTC Normal Pod my-jaeger-7d4b874fd-q8lmk.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:24 +0000 UTC Normal Pod my-jaeger-7d4b874fd-q8lmk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:24 +0000 UTC Normal Pod my-jaeger-7d4b874fd-q8lmk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:28 +0000 UTC Normal Pod check-span-fkh6t Binding Scheduled Successfully assigned kuttl-test-novel-panther/check-span-fkh6t to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:28 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-fkh6t job-controller logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:28 +0000 UTC Normal Pod report-span-9kpbx Binding Scheduled Successfully assigned kuttl-test-novel-panther/report-span-9kpbx to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:28 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9kpbx job-controller logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:29 +0000 UTC Normal Pod check-span-fkh6t AddedInterface Add eth0 [10.131.0.80/23] from ovn-kubernetes logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:29 +0000 UTC Normal Pod check-span-fkh6t.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:29 +0000 UTC Normal Pod check-span-fkh6t.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:29 +0000 UTC Normal Pod check-span-fkh6t.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:29 +0000 UTC Normal Pod report-span-9kpbx AddedInterface Add eth0 [10.129.2.48/23] from ovn-kubernetes logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:29 +0000 UTC Normal Pod report-span-9kpbx.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:29 +0000 UTC Normal Pod report-span-9kpbx.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:29 +0000 UTC Normal Pod report-span-9kpbx.spec.containers{report-span} 
Started Started container report-span kubelet logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | 2023-12-04 07:32:48 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:32:49 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-novel-panther === CONT kuttl/harness/collector-otlp-allinone-http logger.go:42: 07:33:02 | collector-otlp-allinone-http | Creating namespace: kuttl-test-enormous-pika logger.go:42: 07:33:02 | collector-otlp-allinone-http/0-install | starting test step 0-install logger.go:42: 07:33:02 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-enormous-pika/my-jaeger created logger.go:42: 07:33:08 | collector-otlp-allinone-http/0-install | test step completed 0-install logger.go:42: 07:33:08 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test logger.go:42: 07:33:08 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 07:33:09 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 07:33:16 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3 OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 07:33:16 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 07:33:16 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created logger.go:42: 07:33:16 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created logger.go:42: 07:33:29 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test logger.go:42: 07:33:29 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-enormous-pika: logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:05 +0000 UTC Normal Pod my-jaeger-794c4bbc5d-s674j Binding Scheduled Successfully assigned kuttl-test-enormous-pika/my-jaeger-794c4bbc5d-s674j to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:05 +0000 UTC Normal ReplicaSet.apps my-jaeger-794c4bbc5d SuccessfulCreate Created pod: my-jaeger-794c4bbc5d-s674j replicaset-controller logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:05 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-794c4bbc5d to 1 deployment-controller logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:06 +0000 UTC Normal Pod my-jaeger-794c4bbc5d-s674j AddedInterface Add eth0 [10.128.2.102/23] from ovn-kubernetes logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:06 +0000 UTC Normal Pod 
my-jaeger-794c4bbc5d-s674j.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:06 +0000 UTC Normal Pod my-jaeger-794c4bbc5d-s674j.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:06 +0000 UTC Normal Pod my-jaeger-794c4bbc5d-s674j.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:06 +0000 UTC Normal Pod my-jaeger-794c4bbc5d-s674j.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:06 +0000 UTC Normal Pod my-jaeger-794c4bbc5d-s674j.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:06 +0000 UTC Normal Pod my-jaeger-794c4bbc5d-s674j.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:12 +0000 UTC Normal Pod my-jaeger-794c4bbc5d-s674j.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:12 +0000 UTC Normal Pod my-jaeger-794c4bbc5d-s674j.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:12 +0000 UTC Normal ReplicaSet.apps my-jaeger-794c4bbc5d SuccessfulDelete Deleted pod: my-jaeger-794c4bbc5d-s674j replicaset-controller logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:12 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-794c4bbc5d to 0 from 1 deployment-controller logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:13 +0000 UTC Normal Pod my-jaeger-78cdc58dbc-8bgb6 Binding Scheduled Successfully assigned kuttl-test-enormous-pika/my-jaeger-78cdc58dbc-8bgb6 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:13 +0000 UTC Normal ReplicaSet.apps my-jaeger-78cdc58dbc SuccessfulCreate Created pod: my-jaeger-78cdc58dbc-8bgb6 replicaset-controller logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:13 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-78cdc58dbc to 1 deployment-controller logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:14 +0000 UTC Normal Pod my-jaeger-78cdc58dbc-8bgb6 AddedInterface Add eth0 [10.128.2.103/23] from ovn-kubernetes logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:14 +0000 UTC Normal Pod my-jaeger-78cdc58dbc-8bgb6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:14 +0000 UTC Normal Pod my-jaeger-78cdc58dbc-8bgb6.spec.containers{jaeger} Created Created container jaeger kubelet 
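The smoke test is two Jobs rendered from otlp-smoke-test.yaml.template: report-span pushes spans to the collector over OTLP (port 4318 for the HTTP variant, 4317 for gRPC) and check-span polls the query API until they become visible. Roughly the same check can be done by hand; the snippet below is a sketch only (the trace/span IDs and timestamps are placeholder values, and the real jobs authenticate against the query route with the e2e-test service-account token, omitted here):

    # send one span over OTLP/HTTP, mirroring OTEL_EXPORTER_OTLP_ENDPOINT above
    curl -s -X POST http://my-jaeger-collector-headless:4318/v1/traces \
      -H 'Content-Type: application/json' \
      -d '{"resourceSpans":[{"resource":{"attributes":[{"key":"service.name","value":{"stringValue":"smoke-test"}}]},
           "scopeSpans":[{"spans":[{"traceId":"5b8efff798038103d269b633813fc60c","spanId":"eee19b7ec3c1b174",
           "name":"smoke","kind":1,"startTimeUnixNano":"1701675000000000000","endTimeUnixNano":"1701675000500000000"}]}]}]}'
    # then poll the query service until the new service name is listed
    curl -sk https://my-jaeger-query:443/api/services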
logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:14 +0000 UTC Normal Pod my-jaeger-78cdc58dbc-8bgb6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:14 +0000 UTC Normal Pod my-jaeger-78cdc58dbc-8bgb6.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:14 +0000 UTC Normal Pod my-jaeger-78cdc58dbc-8bgb6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:14 +0000 UTC Normal Pod my-jaeger-78cdc58dbc-8bgb6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:16 +0000 UTC Normal Pod check-span-r6264 Binding Scheduled Successfully assigned kuttl-test-enormous-pika/check-span-r6264 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:16 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-r6264 job-controller logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:16 +0000 UTC Normal Pod report-span-gdgl8 Binding Scheduled Successfully assigned kuttl-test-enormous-pika/report-span-gdgl8 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:16 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-gdgl8 job-controller logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:17 +0000 UTC Normal Pod check-span-r6264 AddedInterface Add eth0 [10.131.0.81/23] from ovn-kubernetes logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:17 +0000 UTC Normal Pod check-span-r6264.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:17 +0000 UTC Normal Pod check-span-r6264.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:17 +0000 UTC Normal Pod check-span-r6264.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:17 +0000 UTC Normal Pod report-span-gdgl8 AddedInterface Add eth0 [10.129.2.49/23] from ovn-kubernetes logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:17 +0000 UTC Normal Pod report-span-gdgl8.spec.containers{report-span} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:17 +0000 UTC Normal Pod report-span-gdgl8.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:17 +0000 UTC Normal Pod report-span-gdgl8.spec.containers{report-span} 
Started Started container report-span kubelet logger.go:42: 07:33:29 | collector-otlp-allinone-http | 2023-12-04 07:33:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 07:33:29 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-enormous-pika === CONT kuttl/harness/collector-autoscale logger.go:42: 07:33:41 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:33:41 | collector-autoscale | Ignoring wait-for-hpa.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:33:41 | collector-autoscale | Creating namespace: kuttl-test-pure-sunfish logger.go:42: 07:33:41 | collector-autoscale/1-install | starting test step 1-install logger.go:42: 07:33:41 | collector-autoscale/1-install | Jaeger:kuttl-test-pure-sunfish/simple-prod created logger.go:42: 07:34:18 | collector-autoscale/1-install | test step completed 1-install logger.go:42: 07:34:18 | collector-autoscale/2-wait-for-hpa | starting test step 2-wait-for-hpa logger.go:42: 07:34:18 | collector-autoscale/2-wait-for-hpa | running command: [sh -c ./wait-for-hpa.sh] logger.go:42: 07:34:18 | collector-autoscale/2-wait-for-hpa | Some HPA metrics are not known yet logger.go:42: 07:34:19 | collector-autoscale/2-wait-for-hpa | test step completed 2-wait-for-hpa logger.go:42: 07:34:19 | collector-autoscale/3- | starting test step 3- logger.go:42: 07:34:19 | collector-autoscale/3- | test step completed 3- logger.go:42: 07:34:19 | collector-autoscale | collector-autoscale events from ns kuttl-test-pure-sunfish: logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:48 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff48c4 to 1 deployment-controller logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:49 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff48c4 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt replicaset-controller logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt Binding Scheduled Successfully assigned kuttl-test-pure-sunfish/elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt.spec.containers{elasticsearch} Started Started 
container elasticsearch kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:33:59 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:04 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestpuresunfishsimpleprod-1-5bdfff4mz5wt.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-collector-67686746b8-rmljl Binding Scheduled Successfully assigned kuttl-test-pure-sunfish/simple-prod-collector-67686746b8-rmljl to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-collector-67686746b8-rmljl AddedInterface Add eth0 [10.131.0.82/23] from ovn-kubernetes logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-collector-67686746b8-rmljl.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present on machine kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-collector-67686746b8-rmljl.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-collector-67686746b8-rmljl.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-67686746b8 SuccessfulCreate Created pod: simple-prod-collector-67686746b8-rmljl replicaset-controller logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-67686746b8 to 1 deployment-controller logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv Binding Scheduled Successfully assigned kuttl-test-pure-sunfish/simple-prod-query-84454b859f-jbppv to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv AddedInterface Add eth0 [10.128.2.104/23] from ovn-kubernetes logger.go:42: 07:34:19 | 
collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Pod simple-prod-query-84454b859f-jbppv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal ReplicaSet.apps simple-prod-query-84454b859f SuccessfulCreate Created pod: simple-prod-query-84454b859f-jbppv replicaset-controller logger.go:42: 07:34:19 | collector-autoscale | 2023-12-04 07:34:16 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-84454b859f to 1 deployment-controller logger.go:42: 07:34:19 | collector-autoscale | Deleting namespace: kuttl-test-pure-sunfish === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (333.56s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.12s) --- PASS: kuttl/harness/collector-otlp-production-grpc (76.24s) --- PASS: kuttl/harness/set-custom-img (49.52s) --- PASS: kuttl/harness/collector-otlp-production-http (69.09s) --- PASS: kuttl/harness/collector-otlp-allinone-grpc (48.22s) --- PASS: kuttl/harness/collector-otlp-allinone-http (39.73s) --- PASS: kuttl/harness/collector-autoscale (44.46s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml time="2023-12-04T07:34:26Z" level=debug msg="Setting a new name for the 
test suites" time="2023-12-04T07:34:26Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-12-04T07:34:26Z" level=debug msg="normalizing test case names" time="2023-12-04T07:34:26Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts" time="2023-12-04T07:34:26Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc" time="2023-12-04T07:34:26Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img" time="2023-12-04T07:34:26Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http" time="2023-12-04T07:34:26Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc" time="2023-12-04T07:34:26Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http" time="2023-12-04T07:34:26Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale"
+----------------------------------------------+--------+
|                     NAME                     | RESULT |
+----------------------------------------------+--------+
| miscellaneous_artifacts                      | passed |
| miscellaneous_collector_otlp_production_grpc | passed |
| miscellaneous_set_custom_img                 | passed |
| miscellaneous_collector_otlp_production_http | passed |
| miscellaneous_collector_otlp_allinone_grpc   | passed |
| miscellaneous_collector_otlp_allinone_http   | passed |
| miscellaneous_collector_autoscale            | passed |
+----------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true + '[' 3 -ne 3 ']' + test_suite_name=sidecar + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/sidecar.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-sidecar make[2]: Entering directory '/tmp/jaeger-tests' ./tests/e2e/sidecar/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 56m Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 56m Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z '' ']' ++ KAFKA_USE_CUSTOM_PODSET=false ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/sidecar/render.sh ++ export SUITE_DIR=./tests/e2e/sidecar ++ SUITE_DIR=./tests/e2e/sidecar ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/sidecar ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + jaeger_service_name=order + start_test sidecar-deployment + '[' 1 -ne 1 ']' + test_name=sidecar-deployment + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-deployment' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m' Rendering files for test sidecar-deployment + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build + '[' _build '!=' _build ']' + mkdir -p sidecar-deployment + cd sidecar-deployment + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-namespace + '[' 1 -ne 1 ']' + test_name=sidecar-namespace + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-namespace' + '[' 1 -ne 1 ']' + 
echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m' Rendering files for test sidecar-namespace + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment + '[' sidecar-deployment '!=' _build ']' + cd .. + mkdir -p sidecar-namespace + cd sidecar-namespace + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-skip-webhook + '[' 1 -ne 1 ']' + test_name=sidecar-skip-webhook + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-skip-webhook' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m' Rendering files for test sidecar-skip-webhook + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace + '[' sidecar-namespace '!=' _build ']' + cd .. 
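Both find-service steps are rendered the same way: render_find_service exports a handful of per-step variables (JAEGER_NAME, SERVICE_NAME, JOB_NUMBER, JAEGER_QUERY_ENDPOINT) and gomplate substitutes them into find-service.yaml.template. Condensed, the pattern is just the following (the template line shown in the comment is an assumed example, not copied from the repo):

    export JAEGER_NAME=agent-as-sidecar SERVICE_NAME=order JOB_NUMBER=00
    export JAEGER_QUERY_ENDPOINT=http://${JAEGER_NAME}-query:16686
    # gomplate resolves exported variables through its .Env context, e.g. a template
    # line such as: url: "{{ .Env.JAEGER_QUERY_ENDPOINT }}/api/traces?service={{ .Env.SERVICE_NAME }}"
    /tmp/jaeger-tests/bin/gomplate -f find-service.yaml.template -o ./03-find-service.yaml
    unset JAEGER_NAME SERVICE_NAME JOB_NUMBER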
+ mkdir -p sidecar-skip-webhook + cd sidecar-skip-webhook + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running sidecar E2E tests' Running sidecar E2E tests + cd tests/e2e/sidecar/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3107041029 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/sidecar-deployment === PAUSE kuttl/harness/sidecar-deployment === RUN kuttl/harness/sidecar-namespace === PAUSE kuttl/harness/sidecar-namespace === RUN kuttl/harness/sidecar-skip-webhook === PAUSE kuttl/harness/sidecar-skip-webhook === CONT kuttl/harness/artifacts logger.go:42: 07:34:34 | artifacts | Creating namespace: kuttl-test-credible-drake logger.go:42: 07:34:34 | artifacts | artifacts events from ns kuttl-test-credible-drake: logger.go:42: 07:34:34 | artifacts | Deleting namespace: kuttl-test-credible-drake === CONT kuttl/harness/sidecar-namespace logger.go:42: 07:34:40 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:34:40 | sidecar-namespace | Creating namespace: kuttl-test-busy-cardinal logger.go:42: 07:34:40 | sidecar-namespace/0-install | starting test step 0-install logger.go:42: 07:34:40 | sidecar-namespace/0-install | Jaeger:kuttl-test-busy-cardinal/agent-as-sidecar created logger.go:42: 07:34:46 | sidecar-namespace/0-install | test step completed 0-install logger.go:42: 07:34:46 | sidecar-namespace/1-install | starting test step 1-install logger.go:42: 07:34:47 | sidecar-namespace/1-install | Deployment:kuttl-test-busy-cardinal/vertx-create-span-sidecar created logger.go:42: 07:34:49 | sidecar-namespace/1-install | test step completed 1-install logger.go:42: 07:34:49 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection logger.go:42: 07:34:49 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"] logger.go:42: 07:34:49 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-busy-cardinal annotate logger.go:42: 07:34:51 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection logger.go:42: 07:34:51 | sidecar-namespace/3-find-service | starting test step 3-find-service logger.go:42: 07:34:51 | sidecar-namespace/3-find-service | Job:kuttl-test-busy-cardinal/00-find-service created logger.go:42: 07:35:02 | sidecar-namespace/3-find-service | test step completed 3-find-service logger.go:42: 07:35:02 | sidecar-namespace/4-other-instance | starting test step 4-other-instance logger.go:42: 07:35:02 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-busy-cardinal/agent-as-sidecar2 created logger.go:42: 07:35:10 | sidecar-namespace/4-other-instance | test step completed 
4-other-instance logger.go:42: 07:35:10 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 07:35:11 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 07:35:11 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 07:35:11 | sidecar-namespace/6-find-service | Job:kuttl-test-busy-cardinal/01-find-service created logger.go:42: 07:35:30 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 07:35:30 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 07:35:30 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 07:35:30 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-busy-cardinal annotate logger.go:42: 07:35:31 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 07:35:31 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-busy-cardinal: logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:44 +0000 UTC Normal Pod agent-as-sidecar-bdf456b59-vcvg4 Binding Scheduled Successfully assigned kuttl-test-busy-cardinal/agent-as-sidecar-bdf456b59-vcvg4 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:44 +0000 UTC Normal Pod agent-as-sidecar-bdf456b59-vcvg4 AddedInterface Add eth0 [10.128.2.105/23] from ovn-kubernetes logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:44 +0000 UTC Normal Pod agent-as-sidecar-bdf456b59-vcvg4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:44 +0000 UTC Normal Pod agent-as-sidecar-bdf456b59-vcvg4.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:44 +0000 UTC Normal Pod agent-as-sidecar-bdf456b59-vcvg4.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:44 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-bdf456b59 SuccessfulCreate Created pod: agent-as-sidecar-bdf456b59-vcvg4 replicaset-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:44 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-bdf456b59 to 1 deployment-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:47 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-n4bsh Binding Scheduled Successfully assigned kuttl-test-busy-cardinal/vertx-create-span-sidecar-84d458b68c-n4bsh to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:47 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-n4bsh AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:47 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-n4bsh.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 
07:34:47 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-n4bsh.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:47 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-n4bsh.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:47 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-n4bsh replicaset-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:47 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5 Binding Scheduled Successfully assigned kuttl-test-busy-cardinal/vertx-create-span-sidecar-6fff9c89ff-9wqc5 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5 AddedInterface Add eth0 [10.131.0.83/23] from ovn-kubernetes logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6fff9c89ff SuccessfulCreate Created pod: vertx-create-span-sidecar-6fff9c89ff-9wqc5 replicaset-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:49 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6fff9c89ff to 1 deployment-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:51 +0000 UTC Normal Pod 00-find-service-6rzk9 Binding Scheduled Successfully assigned kuttl-test-busy-cardinal/00-find-service-6rzk9 to ip-10-0-72-35.us-east-2.compute.internal 
default-scheduler logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:51 +0000 UTC Normal Pod 00-find-service-6rzk9 AddedInterface Add eth0 [10.128.2.106/23] from ovn-kubernetes logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:51 +0000 UTC Normal Pod 00-find-service-6rzk9.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:51 +0000 UTC Normal Pod 00-find-service-6rzk9.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:51 +0000 UTC Normal Pod 00-find-service-6rzk9.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:51 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-6rzk9 job-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:55 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-n4bsh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.51:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:55 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-n4bsh.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.51:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:57 +0000 UTC Warning Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.83:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:57 +0000 UTC Warning Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.83:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:57 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-n4bsh.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:57 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-n4bsh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.51:8080/": read tcp 10.129.2.2:52110->10.129.2.51:8080: read: connection reset by peer kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:57 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-n4bsh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.51:8080/": dial tcp 10.129.2.51:8080: connect: connection refused kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:34:59 +0000 UTC Normal Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:35:31 | 
sidecar-namespace | 2023-12-04 07:35:00 +0000 UTC Warning Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.83:8080/": read tcp 10.131.0.2:40236->10.131.0.83:8080: read: connection reset by peer kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:00 +0000 UTC Warning Pod vertx-create-span-sidecar-6fff9c89ff-9wqc5.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.83:8080/": dial tcp 10.131.0.83:8080: connect: connection refused kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:02 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:08 +0000 UTC Normal Pod agent-as-sidecar2-6868f7bd5c-wrzvx Binding Scheduled Successfully assigned kuttl-test-busy-cardinal/agent-as-sidecar2-6868f7bd5c-wrzvx to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:08 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-6868f7bd5c SuccessfulCreate Created pod: agent-as-sidecar2-6868f7bd5c-wrzvx replicaset-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:08 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-6868f7bd5c to 1 deployment-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:08 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-n4bsh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.51:8080/": read tcp 10.129.2.2:46386->10.129.2.51:8080: read: connection reset by peer kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:09 +0000 UTC Normal Pod agent-as-sidecar2-6868f7bd5c-wrzvx AddedInterface Add eth0 [10.128.2.107/23] from ovn-kubernetes logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:09 +0000 UTC Normal Pod agent-as-sidecar2-6868f7bd5c-wrzvx.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:09 +0000 UTC Normal Pod agent-as-sidecar2-6868f7bd5c-wrzvx.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:09 +0000 UTC Normal Pod agent-as-sidecar2-6868f7bd5c-wrzvx.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:11 +0000 UTC Normal Pod 01-find-service-4587l Binding Scheduled Successfully assigned kuttl-test-busy-cardinal/01-find-service-4587l to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:11 +0000 UTC Normal Pod 01-find-service-4587l AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:11 +0000 UTC Normal Pod 01-find-service-4587l.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:11 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created 
pod: 01-find-service-4587l job-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:11 +0000 UTC Normal Pod agent-as-sidecar-bdf456b59-vcvg4.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:12 +0000 UTC Normal Pod 01-find-service-4587l.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:12 +0000 UTC Normal Pod 01-find-service-4587l.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-n4bsh replicaset-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal Pod vertx-create-span-sidecar-b9895bbc7-zxwvw Binding Scheduled Successfully assigned kuttl-test-busy-cardinal/vertx-create-span-sidecar-b9895bbc7-zxwvw to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal Pod vertx-create-span-sidecar-b9895bbc7-zxwvw AddedInterface Add eth0 [10.128.2.108/23] from ovn-kubernetes logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-b9895bbc7 SuccessfulCreate Created pod: vertx-create-span-sidecar-b9895bbc7-zxwvw replicaset-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:14 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-b9895bbc7 to 1 from 0 deployment-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:15 +0000 UTC Normal Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:15 +0000 UTC Normal Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:35:31 | 
sidecar-namespace | 2023-12-04 07:35:22 +0000 UTC Warning Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.108:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:22 +0000 UTC Warning Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.108:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:25 +0000 UTC Normal Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:25 +0000 UTC Warning Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.108:8080/": read tcp 10.128.2.2:53098->10.128.2.108:8080: read: connection reset by peer kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:25 +0000 UTC Warning Pod vertx-create-span-sidecar-b9895bbc7-zxwvw.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.108:8080/": dial tcp 10.128.2.108:8080: connect: connection refused kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:30 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:30 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6fff9c89ff SuccessfulDelete Deleted pod: vertx-create-span-sidecar-6fff9c89ff-9wqc5 replicaset-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:30 +0000 UTC Normal Pod vertx-create-span-sidecar-bc7cb4b68-bpmbj Binding Scheduled Successfully assigned kuttl-test-busy-cardinal/vertx-create-span-sidecar-bc7cb4b68-bpmbj to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:30 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-bc7cb4b68 SuccessfulCreate Created pod: vertx-create-span-sidecar-bc7cb4b68-bpmbj replicaset-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:30 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-6fff9c89ff to 0 from 1 deployment-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:30 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-bc7cb4b68 to 1 from 0 deployment-controller logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:31 +0000 UTC Normal Pod vertx-create-span-sidecar-bc7cb4b68-bpmbj AddedInterface Add eth0 [10.129.2.53/23] from ovn-kubernetes logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:31 +0000 UTC Normal Pod vertx-create-span-sidecar-bc7cb4b68-bpmbj.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:31 +0000 UTC Normal Pod vertx-create-span-sidecar-bc7cb4b68-bpmbj.spec.containers{vertx-create-span-sidecar} Created Created container 
vertx-create-span-sidecar kubelet logger.go:42: 07:35:31 | sidecar-namespace | 2023-12-04 07:35:31 +0000 UTC Normal Pod vertx-create-span-sidecar-bc7cb4b68-bpmbj.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:35:32 | sidecar-namespace | Deleting namespace: kuttl-test-busy-cardinal === CONT kuttl/harness/sidecar-skip-webhook logger.go:42: 07:35:38 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:35:38 | sidecar-skip-webhook | Creating namespace: kuttl-test-delicate-pangolin logger.go:42: 07:35:39 | sidecar-skip-webhook/0-install | starting test step 0-install logger.go:42: 07:35:39 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-delicate-pangolin/agent-as-sidecar created logger.go:42: 07:35:44 | sidecar-skip-webhook/0-install | test step completed 0-install logger.go:42: 07:35:44 | sidecar-skip-webhook/1-install | starting test step 1-install logger.go:42: 07:35:44 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-delicate-pangolin/vertx-create-span-sidecar created logger.go:42: 07:35:46 | sidecar-skip-webhook/1-install | test step completed 1-install logger.go:42: 07:35:46 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label logger.go:42: 07:35:46 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-delicate-pangolin] logger.go:42: 07:35:46 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled logger.go:42: 07:35:46 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-delicate-pangolin] logger.go:42: 07:35:46 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 07:35:46 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label logger.go:42: 07:35:46 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label logger.go:42: 07:35:46 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-delicate-pangolin] logger.go:42: 07:35:46 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled logger.go:42: 07:35:48 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label logger.go:42: 07:35:48 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-delicate-pangolin: logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:42 +0000 UTC Normal Pod agent-as-sidecar-7b5f7bfc69-scnl5 Binding Scheduled Successfully assigned kuttl-test-delicate-pangolin/agent-as-sidecar-7b5f7bfc69-scnl5 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:42 +0000 UTC Normal Pod agent-as-sidecar-7b5f7bfc69-scnl5 AddedInterface Add eth0 [10.128.2.109/23] from ovn-kubernetes logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:42 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-7b5f7bfc69 SuccessfulCreate Created pod: agent-as-sidecar-7b5f7bfc69-scnl5 replicaset-controller logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 
07:35:42 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-7b5f7bfc69 to 1 deployment-controller logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:43 +0000 UTC Normal Pod agent-as-sidecar-7b5f7bfc69-scnl5.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:43 +0000 UTC Normal Pod agent-as-sidecar-7b5f7bfc69-scnl5.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:43 +0000 UTC Normal Pod agent-as-sidecar-7b5f7bfc69-scnl5.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:44 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-5bpjw Binding Scheduled Successfully assigned kuttl-test-delicate-pangolin/vertx-create-span-sidecar-84d458b68c-5bpjw to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:44 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-5bpjw AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:44 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-5bpjw.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:44 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-5bpjw.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:44 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-5bpjw.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:44 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-5bpjw replicaset-controller logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:44 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:46 +0000 UTC Normal Pod vertx-create-span-sidecar-549b569d49-5l7d9 Binding Scheduled Successfully assigned kuttl-test-delicate-pangolin/vertx-create-span-sidecar-549b569d49-5l7d9 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:46 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-549b569d49 SuccessfulCreate Created pod: vertx-create-span-sidecar-549b569d49-5l7d9 replicaset-controller logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:46 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-549b569d49 to 1 deployment-controller logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:47 +0000 UTC Normal Pod vertx-create-span-sidecar-549b569d49-5l7d9 AddedInterface Add eth0 [10.131.0.84/23] from ovn-kubernetes logger.go:42: 
07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:47 +0000 UTC Normal Pod vertx-create-span-sidecar-549b569d49-5l7d9.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:47 +0000 UTC Normal Pod vertx-create-span-sidecar-549b569d49-5l7d9.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:47 +0000 UTC Normal Pod vertx-create-span-sidecar-549b569d49-5l7d9.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:47 +0000 UTC Normal Pod vertx-create-span-sidecar-549b569d49-5l7d9.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:47 +0000 UTC Normal Pod vertx-create-span-sidecar-549b569d49-5l7d9.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | 2023-12-04 07:35:47 +0000 UTC Normal Pod vertx-create-span-sidecar-549b569d49-5l7d9.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:35:48 | sidecar-skip-webhook | Deleting namespace: kuttl-test-delicate-pangolin === CONT kuttl/harness/sidecar-deployment logger.go:42: 07:35:54 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:35:54 | sidecar-deployment | Creating namespace: kuttl-test-certain-boxer logger.go:42: 07:35:54 | sidecar-deployment/0-install | starting test step 0-install logger.go:42: 07:35:54 | sidecar-deployment/0-install | Jaeger:kuttl-test-certain-boxer/agent-as-sidecar created logger.go:42: 07:35:59 | sidecar-deployment/0-install | test step completed 0-install logger.go:42: 07:35:59 | sidecar-deployment/1-install | starting test step 1-install logger.go:42: 07:35:59 | sidecar-deployment/1-install | Deployment:kuttl-test-certain-boxer/vertx-create-span-sidecar created logger.go:42: 07:36:00 | sidecar-deployment/1-install | test step completed 1-install logger.go:42: 07:36:00 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection logger.go:42: 07:36:00 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-certain-boxer] logger.go:42: 07:36:00 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 07:36:02 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection logger.go:42: 07:36:02 | sidecar-deployment/3-find-service | starting test step 3-find-service logger.go:42: 07:36:02 | sidecar-deployment/3-find-service | Job:kuttl-test-certain-boxer/00-find-service created logger.go:42: 07:36:13 | sidecar-deployment/3-find-service | test step completed 3-find-service logger.go:42: 07:36:13 | sidecar-deployment/4-other-instance | starting test step 4-other-instance logger.go:42: 07:36:13 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-certain-boxer/agent-as-sidecar2 created logger.go:42: 07:36:19 | 
sidecar-deployment/4-other-instance | test step completed 4-other-instance logger.go:42: 07:36:19 | sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 07:36:19 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 07:36:19 | sidecar-deployment/6-find-service | starting test step 6-find-service logger.go:42: 07:36:19 | sidecar-deployment/6-find-service | Job:kuttl-test-certain-boxer/01-find-service created logger.go:42: 07:36:39 | sidecar-deployment/6-find-service | test step completed 6-find-service logger.go:42: 07:36:39 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection logger.go:42: 07:36:39 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-certain-boxer] logger.go:42: 07:36:39 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotate logger.go:42: 07:36:41 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection logger.go:42: 07:36:41 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-certain-boxer: logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:58 +0000 UTC Normal Pod agent-as-sidecar-57bd77dbc6-d79w6 Binding Scheduled Successfully assigned kuttl-test-certain-boxer/agent-as-sidecar-57bd77dbc6-d79w6 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:58 +0000 UTC Normal Pod agent-as-sidecar-57bd77dbc6-d79w6 AddedInterface Add eth0 [10.128.2.110/23] from ovn-kubernetes logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:58 +0000 UTC Normal Pod agent-as-sidecar-57bd77dbc6-d79w6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:58 +0000 UTC Normal Pod agent-as-sidecar-57bd77dbc6-d79w6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:58 +0000 UTC Normal Pod agent-as-sidecar-57bd77dbc6-d79w6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:58 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-57bd77dbc6 SuccessfulCreate Created pod: agent-as-sidecar-57bd77dbc6-d79w6 replicaset-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:58 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-57bd77dbc6 to 1 deployment-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:59 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-7xjl6 Binding Scheduled Successfully assigned kuttl-test-certain-boxer/vertx-create-span-sidecar-84d458b68c-7xjl6 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:59 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-7xjl6 replicaset-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:35:59 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set 
vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:00 +0000 UTC Normal Pod vertx-create-span-sidecar-84488d9894-tpk2x Binding Scheduled Successfully assigned kuttl-test-certain-boxer/vertx-create-span-sidecar-84488d9894-tpk2x to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:00 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84488d9894 SuccessfulCreate Created pod: vertx-create-span-sidecar-84488d9894-tpk2x replicaset-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:00 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-7xjl6 AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:00 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-7xjl6.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:00 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-7xjl6.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:00 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-7xjl6.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:00 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84488d9894 to 1 deployment-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:01 +0000 UTC Normal Pod vertx-create-span-sidecar-84488d9894-tpk2x AddedInterface Add eth0 [10.131.0.85/23] from ovn-kubernetes logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:01 +0000 UTC Normal Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:01 +0000 UTC Normal Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:01 +0000 UTC Normal Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:01 +0000 UTC Normal Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:01 +0000 UTC Normal Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:01 +0000 UTC Normal Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:02 +0000 UTC Normal Pod 
00-find-service-dt5sm Binding Scheduled Successfully assigned kuttl-test-certain-boxer/00-find-service-dt5sm to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:02 +0000 UTC Normal Pod 00-find-service-dt5sm AddedInterface Add eth0 [10.128.2.111/23] from ovn-kubernetes logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:02 +0000 UTC Normal Pod 00-find-service-dt5sm.spec.containers{asserts-container} Pulled Container image "registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:02 +0000 UTC Normal Pod 00-find-service-dt5sm.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:02 +0000 UTC Normal Pod 00-find-service-dt5sm.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:02 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-dt5sm job-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:08 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-7xjl6.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.2.55:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:08 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-7xjl6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.55:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:09 +0000 UTC Warning Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.85:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:09 +0000 UTC Warning Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.85:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:10 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-7xjl6.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:10 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-7xjl6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.55:8080/": read tcp 10.129.2.2:39816->10.129.2.55:8080: read: connection reset by peer kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:10 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-7xjl6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.55:8080/": dial tcp 10.129.2.55:8080: connect: connection refused kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:11 +0000 UTC Normal Pod 
vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:11 +0000 UTC Warning Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.85:8080/": read tcp 10.131.0.2:34512->10.131.0.85:8080: read: connection reset by peer kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:11 +0000 UTC Warning Pod vertx-create-span-sidecar-84488d9894-tpk2x.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.85:8080/": dial tcp 10.131.0.85:8080: connect: connection refused kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:13 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:16 +0000 UTC Normal Pod agent-as-sidecar2-64d6f7f776-6t6t4 Binding Scheduled Successfully assigned kuttl-test-certain-boxer/agent-as-sidecar2-64d6f7f776-6t6t4 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:16 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-64d6f7f776 SuccessfulCreate Created pod: agent-as-sidecar2-64d6f7f776-6t6t4 replicaset-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:16 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-64d6f7f776 to 1 deployment-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:17 +0000 UTC Normal Pod agent-as-sidecar2-64d6f7f776-6t6t4 AddedInterface Add eth0 [10.128.2.112/23] from ovn-kubernetes logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:17 +0000 UTC Normal Pod agent-as-sidecar2-64d6f7f776-6t6t4.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:17 +0000 UTC Normal Pod agent-as-sidecar2-64d6f7f776-6t6t4.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:17 +0000 UTC Normal Pod agent-as-sidecar2-64d6f7f776-6t6t4.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:19 +0000 UTC Normal Pod 01-find-service-dphdt Binding Scheduled Successfully assigned kuttl-test-certain-boxer/01-find-service-dphdt to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:19 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-dphdt job-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:19 +0000 UTC Normal Pod agent-as-sidecar-57bd77dbc6-d79w6.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:20 +0000 UTC Normal Pod 01-find-service-dphdt AddedInterface Add eth0 [10.129.2.56/23] from ovn-kubernetes logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:20 +0000 UTC Normal Pod 01-find-service-dphdt.spec.containers{asserts-container} Pulled Container image 
"registry.build03.ci.openshift.org/ci-op-8sv8qw3g/pipeline@sha256:7cce92f9a908a4498f41ce122d4b2d0df76fc62b2403522555998aa04bba3ac3" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:20 +0000 UTC Normal Pod 01-find-service-dphdt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:20 +0000 UTC Normal Pod 01-find-service-dphdt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:20 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-7xjl6.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.2.55:8080/": read tcp 10.129.2.2:54714->10.129.2.55:8080: read: connection reset by peer kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal Pod vertx-create-span-sidecar-7bbd685876-whtcv Binding Scheduled Successfully assigned kuttl-test-certain-boxer/vertx-create-span-sidecar-7bbd685876-whtcv to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal Pod vertx-create-span-sidecar-7bbd685876-whtcv AddedInterface Add eth0 [10.128.2.113/23] from ovn-kubernetes logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7bbd685876 SuccessfulCreate Created pod: vertx-create-span-sidecar-7bbd685876-whtcv replicaset-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-7xjl6 replicaset-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:24 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7bbd685876 to 1 from 0 deployment-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:25 +0000 UTC Normal Pod 
vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:25 +0000 UTC Normal Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:32 +0000 UTC Warning Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.113:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:32 +0000 UTC Warning Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.113:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:34 +0000 UTC Normal Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:35 +0000 UTC Warning Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.113:8080/": read tcp 10.128.2.2:47760->10.128.2.113:8080: read: connection reset by peer kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:35 +0000 UTC Warning Pod vertx-create-span-sidecar-7bbd685876-whtcv.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.113:8080/": dial tcp 10.128.2.113:8080: connect: connection refused kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:38 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:39 +0000 UTC Normal Pod vertx-create-span-sidecar-76d497fff5-fsncr Binding Scheduled Successfully assigned kuttl-test-certain-boxer/vertx-create-span-sidecar-76d497fff5-fsncr to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:39 +0000 UTC Normal Pod vertx-create-span-sidecar-76d497fff5-fsncr AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:39 +0000 UTC Normal Pod vertx-create-span-sidecar-76d497fff5-fsncr.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:39 +0000 UTC Normal Pod vertx-create-span-sidecar-76d497fff5-fsncr.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:39 +0000 UTC Normal Pod vertx-create-span-sidecar-76d497fff5-fsncr.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:39 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-76d497fff5 SuccessfulCreate Created pod: vertx-create-span-sidecar-76d497fff5-fsncr replicaset-controller logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 
07:36:39 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84488d9894 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84488d9894-tpk2x replicaset-controller
logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:39 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84488d9894 to 0 from 1 deployment-controller
logger.go:42: 07:36:41 | sidecar-deployment | 2023-12-04 07:36:39 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-76d497fff5 to 1 from 0 deployment-controller
logger.go:42: 07:36:41 | sidecar-deployment | Deleting namespace: kuttl-test-certain-boxer
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (134.66s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (6.34s)
--- PASS: kuttl/harness/sidecar-namespace (58.37s)
--- PASS: kuttl/harness/sidecar-skip-webhook (15.40s)
--- PASS: kuttl/harness/sidecar-deployment (54.38s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml
time="2023-12-04T07:36:48Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-04T07:36:48Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-04T07:36:48Z" level=debug msg="normalizing test case names"
time="2023-12-04T07:36:48Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts"
time="2023-12-04T07:36:48Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace"
time="2023-12-04T07:36:48Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook"
time="2023-12-04T07:36:48Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment"
+------------------------------+--------+
|             NAME             | RESULT |
+------------------------------+--------+
| sidecar_artifacts            | passed |
| sidecar_sidecar_namespace    | passed |
| sidecar_sidecar_skip_webhook | passed |
| sidecar_sidecar_deployment   | passed |
+------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=streaming
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
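In effect, the reporting stage of the sidecar suite above reduces to three commands, so the run can be replayed against another cluster with roughly the following. This is a sketch, not the suite runner itself: the paths and kubeconfig are the ones from this particular run and would need adjusting, and in this run kuttl's XML report lands in ./artifacts/kuttl-report.xml (the location set up when kuttl-test.yaml was rendered).

    go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
    cd tests/e2e/sidecar/_build
    # run the rendered kuttl tests and emit an XML report
    KUBECONFIG=/tmp/kubeconfig-3107041029 /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
    # normalize test case names and write the final JUnit file
    junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml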
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/streaming.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-streaming
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
KAFKA_VERSION=0.32.0 \
SKIP_KAFKA=false \
SKIP_ES_EXTERNAL=true \
./tests/e2e/streaming/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-12-02-123536 True False 58m Cluster version is 4.15.0-0.nightly-2023-12-02-123536'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.15.0-0.nightly-2023-12-02-123536 True False 58m Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 0.32.0 ']'
++ version_le 0.32.0 0.25.0
+++ echo 0.32.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 0.32.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/streaming/render.sh
++ export SUITE_DIR=./tests/e2e/streaming
++ SUITE_DIR=./tests/e2e/streaming
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
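The version_le expansion in the trace above is how render.sh decides whether this Strimzi/Kafka version needs the newer pod-set handling (KAFKA_USE_CUSTOM_PODSET). A minimal sketch of the helper, reconstructed from its xtrace; the real definition lives in the repo's hack scripts and may differ cosmetically:

    # version_le A B: succeed when A <= B in version-sort order.
    version_le() {
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }

Here version_le 0.32.0 0.25.0 fails because sort -V puts 0.25.0 first, so the caller sets KAFKA_USE_CUSTOM_PODSET=true for Kafka 0.32.0.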
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml + render_smoke_test simple-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=simple-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + export JAEGER_NAME=simple-streaming + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-tls + '[' 1 -ne 1 ']' + test_name=streaming-with-tls + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-tls' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m' Rendering files for test streaming-with-tls + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple + '[' streaming-simple '!=' _build ']' + cd .. 
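Note that the smoke test rendered above is driven entirely by the variables exported just before gomplate runs (JAEGER_NAME, JAEGER_QUERY_ENDPOINT, JAEGER_COLLECTOR_ENDPOINT). The template itself is not reproduced in this log; the following is only a minimal sketch of the shape such a template can take, assuming gomplate's env.Getenv lookups and a stand-in curl image:

# hypothetical fragment of openshift/smoke-test.yaml.template
apiVersion: batch/v1
kind: Job
metadata:
  name: {{ env.Getenv "JAEGER_NAME" }}-smoke-test
spec:
  template:
    spec:
      restartPolicy: Never
      containers:
        - name: smoke-test
          image: curlimages/curl   # stand-in image, not taken from this log
          args:
            - -sSfk
            - '{{ env.Getenv "JAEGER_QUERY_ENDPOINT" }}/api/services'

Rendered with JAEGER_NAME=simple-streaming and JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443, a template of this shape would produce the 05-smoke-test.yaml written above.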
+ mkdir -p streaming-with-tls + cd streaming-with-tls + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./1-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + render_smoke_test tls-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=tls-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + export JAEGER_NAME=tls-streaming + JAEGER_NAME=tls-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. 
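Two details of the render_assert_kafka trace are worth flagging. First, the step index is computed with expr, and expr 00 + 1 prints 1, so the middle assert lands in 1-assert.yaml rather than 01-assert.yaml; kuttl's ^(\d+)- regexp still parses it as step 1, between 00-assert.yaml and 02-assert.yaml, so step ordering is preserved. Second, each rendered assert is just a desired-state document that kuttl polls until it matches. A hypothetical render of assert-entity-operator.yaml.template with CLUSTER_NAME=my-cluster (the template itself is not shown in this log):

# presumed content of the rendered 02-assert.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: my-cluster-entity-operator
status:
  readyReplicas: 1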
+ mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./4-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + render_install_tracegen auto-provisioned 06 + '[' 2 -ne 2 ']' + jaeger=auto-provisioned + step=06 + replicas=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/tracegen.yaml -o ./06-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=1 ./06-install.yaml + sed -i s~simple-prod~auto-provisioned~gi ./06-install.yaml + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-tracegen.yaml.template -o ./06-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd 
tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-3107041029 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-simple === PAUSE kuttl/harness/streaming-simple === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === RUN kuttl/harness/streaming-with-tls === PAUSE kuttl/harness/streaming-with-tls === CONT kuttl/harness/artifacts logger.go:42: 07:37:01 | artifacts | Creating namespace: kuttl-test-chief-crab logger.go:42: 07:37:01 | artifacts | artifacts events from ns kuttl-test-chief-crab: logger.go:42: 07:37:01 | artifacts | Deleting namespace: kuttl-test-chief-crab === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-deep-skunk logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 07:37:08 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 07:37:14 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 07:37:14 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 07:37:14 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 07:37:17 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 07:37:18 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 07:37:35 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 1-install logger.go:42: 
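The 1-install step for streaming-with-autoprovisioning-autoscale is fully reconstructible from the commands logged above; expressed as a kuttl step file, it corresponds to something like:

# equivalent kuttl TestStep for the logged 1-install commands
apiVersion: kuttl.dev/v1beta1
kind: TestStep
commands:
  - script: oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true
  - script: oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true
  - script: sleep 6
  - script: kubectl apply -f elasticsearch_0.yml -n $NAMESPACE
  - script: sleep 3
  - script: kubectl apply -f elasticsearch_1.yml -n $NAMESPACE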
07:37:35 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install logger.go:42: 07:37:35 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-deep-skunk/auto-provisioned created logger.go:42: 07:37:35 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install logger.go:42: 07:37:35 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3- logger.go:42: 07:38:16 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3- logger.go:42: 07:38:16 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4- logger.go:42: 07:38:48 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4- logger.go:42: 07:38:48 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5- logger.go:42: 07:39:10 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5- logger.go:42: 07:39:10 | streaming-with-autoprovisioning-autoscale/6-install | starting test step 6-install logger.go:42: 07:39:10 | streaming-with-autoprovisioning-autoscale/6-install | Deployment:kuttl-test-deep-skunk/tracegen created logger.go:42: 07:39:18 | streaming-with-autoprovisioning-autoscale/6-install | test step completed 6-install logger.go:42: 07:39:18 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7- logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale/7- | test step failed 7- case.go:364: failed in step 7- case.go:366: --- Deployment:kuttl-test-deep-skunk/auto-provisioned-ingester +++ Deployment:kuttl-test-deep-skunk/auto-provisioned-ingester @@ -1,8 +1,320 @@ apiVersion: apps/v1 kind: Deployment metadata: + labels: + app: jaeger + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: auto-provisioned + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: auto-provisioned-ingester + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"d703f995-28ea-4638-955d-c5c9620551fb"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-ingester"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":14270,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + 
f:timeoutSeconds: {} + f:resources: + .: {} + f:requests: + .: {} + f:memory: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/var/run/secrets/auto-provisioned"}: + .: {} + f:mountPath: {} + f:name: {} + k:{"mountPath":"/var/run/secrets/auto-provisioned-cluster-ca"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"auto-provisioned-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"kafkauser-auto-provisioned"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"kafkauser-auto-provisioned-cluster-ca"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: Go-http-client + operation: Update + time: "2023-12-04T07:39:12Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:deployment.kubernetes.io/revision: {} + f:status: + f:availableReplicas: {} + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:readyReplicas: {} + f:replicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2023-12-04T07:39:14Z" name: auto-provisioned-ingester namespace: kuttl-test-deep-skunk + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: auto-provisioned + uid: d703f995-28ea-4638-955d-c5c9620551fb +spec: + progressDeadlineSeconds: 600 + replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: auto-provisioned + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: auto-provisioned-ingester + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14270" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: auto-provisioned + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: auto-provisioned-ingester + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --es.server-urls=http://elasticsearch:9200 + - --kafka.consumer.authentication=tls + - --kafka.consumer.brokers=auto-provisioned-kafka-bootstrap.kuttl-test-deep-skunk.svc.cluster.local:9093 + - --kafka.consumer.tls.ca=/var/run/secrets/auto-provisioned-cluster-ca/ca.crt + - --kafka.consumer.tls.cert=/var/run/secrets/auto-provisioned/user.crt + - --kafka.consumer.tls.enabled=true + - --kafka.consumer.tls.key=/var/run/secrets/auto-provisioned/user.key + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + image: 
registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14270 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-ingester + ports: + - containerPort: 14270 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14270 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: + requests: + memory: 500m + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /var/run/secrets/auto-provisioned + name: kafkauser-auto-provisioned + - mountPath: /var/run/secrets/auto-provisioned-cluster-ca + name: kafkauser-auto-provisioned-cluster-ca + - mountPath: /etc/pki/ca-trust/extracted/pem + name: auto-provisioned-trusted-ca + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: auto-provisioned + serviceAccountName: auto-provisioned + terminationGracePeriodSeconds: 30 + volumes: + - name: kafkauser-auto-provisioned + secret: + defaultMode: 420 + secretName: auto-provisioned + - name: kafkauser-auto-provisioned-cluster-ca + secret: + defaultMode: 420 + secretName: auto-provisioned-cluster-ca-cert + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: auto-provisioned-trusted-ca + name: auto-provisioned-trusted-ca status: - readyReplicas: 2 + availableReplicas: 1 + conditions: + - lastTransitionTime: "2023-12-04T07:39:14Z" + lastUpdateTime: "2023-12-04T07:39:14Z" + message: Deployment has minimum availability. + reason: MinimumReplicasAvailable + status: "True" + type: Available + - lastTransitionTime: "2023-12-04T07:39:12Z" + lastUpdateTime: "2023-12-04T07:39:14Z" + message: ReplicaSet "auto-provisioned-ingester-57856495d6" has successfully progressed. 
+ reason: NewReplicaSetAvailable + status: "True" + type: Progressing + observedGeneration: 1 + readyReplicas: 1 + replicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-deep-skunk/auto-provisioned-ingester: .status.readyReplicas: value mismatch, expected: 2 != actual: 1 logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-deep-skunk: logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:14 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-deep-skunk/elasticsearch-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:14 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:15 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.128.2.114/23] from ovn-kubernetes logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:15 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:23 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 8.247s (8.247s including waiting) kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:23 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:23 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:30 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.128.2.114:9200/": dial tcp 10.128.2.114:9200: connect: connection refused kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:40 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:41 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:41 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
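This mismatch is consistent with the render trace earlier in the log: the yq edits set .spec.ingester.resources.requests.memory to "20Mi" and then overwrote it with "500m", a CPU-style quantity that Kubernetes parses as roughly half a byte of memory, and no cpu request was ever added. With no usable cpu request, the HPA cannot compute utilization (see the FailedGetResourceMetric events below), never scales the ingester from 1 to 2 replicas, and the assert times out waiting for readyReplicas: 2. A plausible intended fragment of 02-install.yaml, assuming the second edit was meant to target cpu rather than memory:

# hypothetical corrected ingester fragment of the Jaeger CR
spec:
  ingester:
    autoscale: true
    minReplicas: 1
    maxReplicas: 2
    resources:
      requests:
        memory: 20Mi   # "500m" as a memory quantity is ~0.5 byte
        cpu: 500m      # assumed intent: give the HPA a cpu request to measure against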
persistentvolume-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:41 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-deep-skunk/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662 logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:45 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-61f18805-0217-47ac-88e5-9536d4682213 ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662 logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:46 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-deep-skunk/auto-provisioned-zookeeper-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:47 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-61f18805-0217-47ac-88e5-9536d4682213" attachdetach-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.128.2.115/23] from ovn-kubernetes logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:37:56 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:17 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:17 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:17 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:17 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-deep-skunk/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662 logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:22 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-deep-skunk/auto-provisioned-kafka-0 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:22 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-4237a398-1886-4679-8f9d-d0cef571f069 ebs.csi.aws.com_aws-ebs-csi-driver-controller-5dbd99d646-z4wrh_f641fef9-5739-436e-be4f-a8f18fb64662 logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:24 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-4237a398-1886-4679-8f9d-d0cef571f069" attachdetach-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:27 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.128.2.116/23] from ovn-kubernetes logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:27 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:27 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:27 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9 Binding Scheduled Successfully assigned kuttl-test-deep-skunk/auto-provisioned-entity-operator-8689997c78-fq6x9 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9 AddedInterface Add eth0 [10.128.2.117/23] from ovn-kubernetes logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod 
auto-provisioned-entity-operator-8689997c78-fq6x9.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel8-operator@sha256:464b04e622e0b3472e8a1e1ce8a2efd32cf27fc2056d3d589bfe6b5f9ac0bf4e" already present on machine kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9.spec.containers{tls-sidecar} Pulled Container image "registry.redhat.io/amq-streams/kafka-35-rhel8@sha256:0bcdd55f01638f650ed69ebdf5f8a7291e103805b8cbb34013ced88e46e0678c" already present on machine kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9.spec.containers{tls-sidecar} Created Created container tls-sidecar kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-8689997c78 SuccessfulCreate Created pod: auto-provisioned-entity-operator-8689997c78-fq6x9 replicaset-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:49 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-8689997c78 to 1 deployment-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:38:50 +0000 UTC Normal Pod auto-provisioned-entity-operator-8689997c78-fq6x9.spec.containers{tls-sidecar} Started Started container tls-sidecar kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:10 +0000 UTC Normal Pod tracegen-76b5cd46c4-rl7q2 Binding Scheduled Successfully assigned kuttl-test-deep-skunk/tracegen-76b5cd46c4-rl7q2 to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:10 +0000 UTC Warning Pod tracegen-76b5cd46c4-rl7q2 FailedMount MountVolume.SetUp failed for volume "auto-provisioned-trusted-ca" : configmap "auto-provisioned-trusted-ca" not found kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:10 +0000 UTC Warning Pod tracegen-76b5cd46c4-rl7q2 FailedMount MountVolume.SetUp failed for volume "auto-provisioned-service-ca" : configmap "auto-provisioned-service-ca" not found kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:10 +0000 UTC Normal ReplicaSet.apps tracegen-76b5cd46c4 SuccessfulCreate Created pod: tracegen-76b5cd46c4-rl7q2 replicaset-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:10 +0000 UTC Normal Deployment.apps tracegen 
ScalingReplicaSet Scaled up replica set tracegen-76b5cd46c4 to 1 deployment-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Pod auto-provisioned-collector-69f9d84747-t7v6d Binding Scheduled Successfully assigned kuttl-test-deep-skunk/auto-provisioned-collector-69f9d84747-t7v6d to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Warning Pod auto-provisioned-collector-69f9d84747-t7v6d FailedMount MountVolume.SetUp failed for volume "auto-provisioned-collector-tls-config-volume" : secret "auto-provisioned-collector-headless-tls" not found kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-69f9d84747 SuccessfulCreate Created pod: auto-provisioned-collector-69f9d84747-t7v6d replicaset-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-69f9d84747 to 1 deployment-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Pod auto-provisioned-ingester-57856495d6-bk46g Binding Scheduled Successfully assigned kuttl-test-deep-skunk/auto-provisioned-ingester-57856495d6-bk46g to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Pod auto-provisioned-ingester-57856495d6-bk46g AddedInterface Add eth0 [10.131.0.86/23] from ovn-kubernetes logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Pod auto-provisioned-ingester-57856495d6-bk46g.spec.containers{jaeger-ingester} Pulled Container image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:109a3676bfaccda0021c57bbe82bceed5140faaedb8bad4d7d2be7cd660de039" already present on machine kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-57856495d6 SuccessfulCreate Created pod: auto-provisioned-ingester-57856495d6-bk46g replicaset-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-57856495d6 to 1 deployment-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4 Binding Scheduled Successfully assigned kuttl-test-deep-skunk/auto-provisioned-query-54b94b8d66-pfrc4 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4 AddedInterface Add eth0 [10.131.0.87/23] from ovn-kubernetes logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 07:49:19 | 
streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-54b94b8d66 SuccessfulCreate Created pod: auto-provisioned-query-54b94b8d66-pfrc4 replicaset-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:12 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-54b94b8d66 to 1 deployment-controller logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-collector-69f9d84747-t7v6d AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-collector-69f9d84747-t7v6d.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-ingester-57856495d6-bk46g.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-ingester-57856495d6-bk46g.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:13 +0000 UTC Normal Pod auto-provisioned-query-54b94b8d66-pfrc4.spec.containers{jaeger-agent} Started Started container 
jaeger-agent kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:14 +0000 UTC Normal Pod tracegen-76b5cd46c4-rl7q2 AddedInterface Add eth0 [10.129.2.58/23] from ovn-kubernetes logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:14 +0000 UTC Normal Pod tracegen-76b5cd46c4-rl7q2.spec.containers{tracegen} Pulling Pulling image "jaegertracing/jaeger-tracegen:1.51.0" kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:15 +0000 UTC Normal Pod tracegen-76b5cd46c4-rl7q2.spec.containers{tracegen} Pulled Successfully pulled image "jaegertracing/jaeger-tracegen:1.51.0" in 1.756s (1.756s including waiting) kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:16 +0000 UTC Normal Pod tracegen-76b5cd46c4-rl7q2.spec.containers{tracegen} Created Created container tracegen kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:16 +0000 UTC Normal Pod tracegen-76b5cd46c4-rl7q2.spec.containers{tracegen} Started Started container tracegen kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:16 +0000 UTC Normal Pod tracegen-76b5cd46c4-rl7q2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:16 +0000 UTC Normal Pod tracegen-76b5cd46c4-rl7q2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:16 +0000 UTC Normal Pod tracegen-76b5cd46c4-rl7q2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:17 +0000 UTC Normal Pod auto-provisioned-collector-69f9d84747-t7v6d.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" in 4.391s (4.391s including waiting) kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:18 +0000 UTC Normal Pod auto-provisioned-collector-69f9d84747-t7v6d.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:18 +0000 UTC Normal Pod auto-provisioned-collector-69f9d84747-t7v6d.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:27 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:27 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:27 +0000 UTC 
Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:27 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:27 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:27 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod auto-provisioned-collector-69f9d84747-t7v6d horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:39:57 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (1 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:44:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container 
jaeger-collector of Pod auto-provisioned-collector-69f9d84747-t7v6d horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | 2023-12-04 07:44:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-ingester of Pod auto-provisioned-ingester-57856495d6-bk46g horizontal-pod-autoscaler logger.go:42: 07:49:19 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-deep-skunk === CONT kuttl/harness/streaming-with-tls logger.go:42: 07:49:55 | streaming-with-tls | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:49:55 | streaming-with-tls | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:49:55 | streaming-with-tls | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 07:49:55 | streaming-with-tls | Creating namespace: kuttl-test-game-walleye logger.go:42: 07:49:55 | streaming-with-tls/0-install | starting test step 0-install logger.go:42: 07:49:55 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 07:49:55 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:49:55 | streaming-with-tls/0-install | >>>> Skipping kafka-operator undeploy logger.go:42: 07:49:55 | streaming-with-tls/0-install | kubectl delete --namespace kuttl-test-game-walleye -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 07:49:55 | streaming-with-tls/0-install | error: the path "tests/_build/kafka-example.yaml" does not exist logger.go:42: 07:49:55 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:49:55 | streaming-with-tls/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 07:49:55 | streaming-with-tls/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 07:49:55 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-game-walleye logger.go:42: 07:49:55 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-game-walleye 2>&1 | grep -v "already exists" || true logger.go:42: 07:49:55 | streaming-with-tls/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 07:49:55 | streaming-with-tls/0-install | >>>> Creating namespace kuttl-test-game-walleye logger.go:42: 07:49:55 | streaming-with-tls/0-install | mkdir -p tests/_build/ logger.go:42: 07:49:55 | streaming-with-tls/0-install | kubectl create namespace kuttl-test-game-walleye 2>&1 | grep -v "already exists" || true logger.go:42: 07:49:56 | streaming-with-tls/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs logger.go:42: 07:49:56 | streaming-with-tls/0-install | % Total % Received % Xferd Average Speed Time Time Time Current logger.go:42: 07:49:56 | streaming-with-tls/0-install | Dload Upload Total Spent Left Speed logger.go:42: 07:49:56 | streaming-with-tls/0-install | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 865 100 865 0 0 4286 0 --:--:-- --:--:-- --:--:-- 4303
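The make kafka target above fetches Strimzi's upstream kafka-persistent-single.yaml example, shrinks its volumes with sed (size: 100Gi -> size: 10Gi), dry-runs it, and applies it. Abridged, the patched file is roughly this shape (only the fields relevant here; the upstream example also carries listeners and broker config):

# abridged sketch of tests/_build/kafka-example.yaml after the sed patch
apiVersion: kafka.strimzi.io/v1beta2
kind: Kafka
metadata:
  name: my-cluster
spec:
  kafka:
    replicas: 1
    storage:
      type: jbod
      volumes:
        - id: 0
          type: persistent-claim
          size: 10Gi          # patched down from 100Gi
          deleteClaim: false
  zookeeper:
    replicas: 1
    storage:
      type: persistent-claim
      size: 10Gi              # patched down from 100Gi
      deleteClaim: false
  entityOperator:
    topicOperator: {}
    userOperator: {}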
logger.go:42: 07:49:56 | streaming-with-tls/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml logger.go:42: 07:49:56 | streaming-with-tls/0-install | kubectl -n kuttl-test-game-walleye apply --dry-run=client -f tests/_build/kafka-example.yaml logger.go:42: 07:49:56 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run) logger.go:42: 07:49:56 | streaming-with-tls/0-install | kubectl -n kuttl-test-game-walleye apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true logger.go:42: 07:49:57 | streaming-with-tls/0-install | kafka.kafka.strimzi.io/my-cluster created logger.go:42: 07:49:57 | streaming-with-tls/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 07:59:57 | streaming-with-tls/0-install | test step failed 0-install case.go:364: failed in step 0-install case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found logger.go:42: 07:59:57 | streaming-with-tls | streaming-with-tls events from ns kuttl-test-game-walleye: logger.go:42: 07:59:57 | streaming-with-tls | Deleting namespace: kuttl-test-game-walleye === CONT kuttl/harness/streaming-simple logger.go:42: 08:00:04 | streaming-simple | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:00:04 | streaming-simple | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:00:04 | streaming-simple | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:00:04 | streaming-simple | Creating namespace: kuttl-test-precise-bobcat logger.go:42: 08:00:04 | streaming-simple/0-install | starting test step 0-install logger.go:42: 08:00:04 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make undeploy-kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true] logger.go:42: 08:00:04 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 08:00:04 | streaming-simple/0-install | >>>> Skipping kafka-operator undeploy logger.go:42: 08:00:04 | streaming-simple/0-install | kubectl delete --namespace kuttl-test-precise-bobcat -f tests/_build/kafka-example.yaml 2>&1 || true logger.go:42: 08:00:04 | streaming-simple/0-install | Error from server (NotFound): error when deleting "tests/_build/kafka-example.yaml": kafkas.kafka.strimzi.io "my-cluster" not found logger.go:42: 08:00:04 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 08:00:04 | streaming-simple/0-install | running command: [sh -c cd /tmp/jaeger-tests && make kafka KAFKA_NAMESPACE=$NAMESPACE KAFKA_OLM=true KAFKA_VERSION=0.32.0] logger.go:42: 08:00:04 | streaming-simple/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 08:00:04 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-precise-bobcat logger.go:42: 08:00:04 | streaming-simple/0-install | kubectl create namespace kuttl-test-precise-bobcat 2>&1 | grep -v "already exists" || true logger.go:42: 08:00:04 | streaming-simple/0-install | >>>> Skipping kafka-operator deployment, assuming it has been installed via OperatorHub logger.go:42: 08:00:04 | streaming-simple/0-install | >>>> Creating namespace kuttl-test-precise-bobcat logger.go:42: 08:00:04 | streaming-simple/0-install | mkdir -p tests/_build/ logger.go:42: 08:00:04 | streaming-simple/0-install | kubectl create namespace kuttl-test-precise-bobcat 2>&1 |
logger.go:42: 08:00:04 | streaming-simple/0-install | curl --fail --location "https://raw.githubusercontent.com/strimzi/strimzi-kafka-operator/0.32.0/examples/kafka/kafka-persistent-single.yaml" --output tests/_build/kafka-example.yaml --create-dirs
logger.go:42: 08:00:04 | streaming-simple/0-install | "sed" -i 's/size: 100Gi/size: 10Gi/g' tests/_build/kafka-example.yaml
logger.go:42: 08:00:04 | streaming-simple/0-install | kubectl -n kuttl-test-precise-bobcat apply --dry-run=client -f tests/_build/kafka-example.yaml
logger.go:42: 08:00:04 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created (dry run)
logger.go:42: 08:00:04 | streaming-simple/0-install | kubectl -n kuttl-test-precise-bobcat apply -f tests/_build/kafka-example.yaml 2>&1 | grep -v "already exists" || true
logger.go:42: 08:00:05 | streaming-simple/0-install | kafka.kafka.strimzi.io/my-cluster created
logger.go:42: 08:00:05 | streaming-simple/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 08:10:06 | streaming-simple/0-install | test step failed 0-install
case.go:364: failed in step 0-install
case.go:366: strimzipodsets.core.strimzi.io "my-cluster-zookeeper" not found
logger.go:42: 08:10:06 | streaming-simple | streaming-simple events from ns kuttl-test-precise-bobcat:
logger.go:42: 08:10:06 | streaming-simple | Deleting namespace: kuttl-test-precise-bobcat
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1990.52s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.11s)
        --- FAIL: kuttl/harness/streaming-with-autoprovisioning-autoscale (767.61s)
        --- FAIL: kuttl/harness/streaming-with-tls (608.35s)
        --- FAIL: kuttl/harness/streaming-simple (608.29s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml
time="2023-12-04T08:10:13Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-04T08:10:13Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-04T08:10:13Z" level=debug msg="normalizing test case names"
time="2023-12-04T08:10:13Z" level=debug msg="streaming/artifacts -> streaming_artifacts"
time="2023-12-04T08:10:13Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale"
time="2023-12-04T08:10:13Z" level=debug msg="streaming/streaming-with-tls -> streaming_streaming_with_tls"
time="2023-12-04T08:10:13Z" level=debug msg="streaming/streaming-simple -> streaming_streaming_simple"
+------------------------------------------------------+--------+
|                         NAME                         | RESULT |
+------------------------------------------------------+--------+
| streaming_artifacts                                  | passed |
| streaming_streaming_with_autoprovisioning_autoscale  | failed |
| streaming_streaming_with_tls                         | failed |
| streaming_streaming_simple                           | failed |
+------------------------------------------------------+--------+
+ '[' '' '!=' true ']'
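
junitcli post-processes kuttl's XML report into JUnit-friendly case names: the debug lines above show both '/' and '-' collapsing to '_'. A one-liner that reproduces the visible renaming (the real tool may apply additional rules):

    for name in streaming/artifacts streaming/streaming-with-tls; do
      echo "$name -> $(echo "$name" | sed 's#[/-]#_#g')"
    done
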
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=ui
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/ui.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-ui
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 92m Cluster version is 4.15.0-0.nightly-2023-12-02-123536'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 92m Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z '' ']'
++ KAFKA_USE_CUSTOM_PODSET=false
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/ui/render.sh
++ export SUITE_DIR=./tests/e2e/ui
++ SUITE_DIR=./tests/e2e/ui
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
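
The render scripts decide between vanilla-Kubernetes and OpenShift templates by probing for the ClusterVersion API, which only exists on OpenShift: a non-empty answer flips IS_OPENSHIFT to true, as the trace above shows. A minimal sketch of the same probe (not the repository's exact code):

    # ClusterVersion is an OpenShift-only resource; the error is expected on plain Kubernetes.
    if output=$(kubectl get clusterversion 2>/dev/null) && [ -n "$output" ]; then
      IS_OPENSHIFT=true
    else
      IS_OPENSHIFT=false
    fi
    echo "IS_OPENSHIFT=$IS_OPENSHIFT"

Note also that KAFKA_USE_CUSTOM_PODSET defaults to false here because the ui suite exports no KAFKA_VERSION (the '[' -z '' ']' test above).
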
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/ui
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ start_test allinone
+ '[' 1 -ne 1 ']'
+ test_name=allinone
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test allinone'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test allinone\e[0m'
Rendering files for test allinone
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/ui/_build
+ '[' _build '!=' _build ']'
+ mkdir -p allinone
+ cd allinone
+ export GET_URL_COMMAND
+ export URL
+ export JAEGER_NAME=all-in-one-ui
+ JAEGER_NAME=all-in-one-ui
+ '[' true = true ']'
+ GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE'
+ URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml
+ ASSERT_PRESENT=true
+ TRACKING_ID=MyTrackingId
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml
+ start_test production
+ '[' 1 -ne 1 ']'
+ test_name=production
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test production'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test production\e[0m'
Rendering files for test production
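
Every step file above is rendered from a gomplate template that is parameterized purely through exported environment variables (JAEGER_NAME, EXPECTED_CODE, TRACKING_ID, and so on). The mechanism is easy to reproduce; the template below is a made-up illustration, not one of the repository's templates:

    # Hypothetical template rendered the same way as the files above.
    cat > /tmp/demo.yaml.template <<'EOF'
    apiVersion: v1
    kind: ConfigMap
    metadata:
      name: {{ env.Getenv "JAEGER_NAME" }}-demo
    data:
      expectedCode: "{{ env.Getenv "EXPECTED_CODE" }}"
    EOF
    JAEGER_NAME=all-in-one-ui EXPECTED_CODE=200 gomplate -f /tmp/demo.yaml.template -o /tmp/demo.yaml
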
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone
+ '[' allinone '!=' _build ']'
+ cd ..
+ mkdir -p production
+ cd production
+ export JAEGER_NAME=production-ui
+ JAEGER_NAME=production-ui
+ [[ true = true ]]
+ [[ true = true ]]
+ render_install_jaeger production-ui production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=production-ui
+ JAEGER_NAME=production-ui
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh
+ chmod +x ./ensure-ingress-host.sh
+ '[' true = true ']'
+ INSECURE=true
+ EXPECTED_CODE=403
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml
+ INSECURE=true
+ EXPECTED_CODE=200
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml
+ ASSERT_PRESENT=false
+ TRACKING_ID=MyTrackingId
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml
+ ASSERT_PRESENT=true
+ TRACKING_ID=MyTrackingId
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running ui E2E tests'
Running ui E2E tests
+ cd tests/e2e/ui/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3107041029
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 600 seconds for each step
harness.go:372: testsuite: . has 3 tests
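
The rendered suite is executed with a plain 'kubectl-kuttl test --report xml' from the _build directory, against whatever cluster KUBECONFIG points at; the 600-second per-step timeout logged by harness.go:73 comes from the rendered kuttl-test.yaml. When reproducing a single failure locally, kuttl can run one test case at a time (flag names as of kuttl 0.15; check 'kubectl-kuttl test --help' for your version):

    cd tests/e2e/ui/_build
    # Run only the 'production' test with a longer per-step timeout.
    kubectl-kuttl test --test production --timeout 900 --report xml
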
=== RUN kuttl/harness
=== RUN kuttl/harness/allinone
=== PAUSE kuttl/harness/allinone
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/production
=== PAUSE kuttl/harness/production
=== CONT kuttl/harness/allinone
logger.go:42: 08:10:20 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 08:10:20 | allinone | Creating namespace: kuttl-test-patient-labrador
logger.go:42: 08:10:20 | allinone/0-install | starting test step 0-install
logger.go:42: 08:10:20 | allinone/0-install | Jaeger:kuttl-test-patient-labrador/all-in-one-ui created
logger.go:42: 08:10:24 | allinone/0-install | test step completed 0-install
logger.go:42: 08:10:24 | allinone/1-curl | starting test step 1-curl
logger.go:42: 08:10:24 | allinone/1-curl | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:10:24 | allinone/1-curl | Checking the Ingress host value was populated
logger.go:42: 08:10:24 | allinone/1-curl | Try number 0
logger.go:42: 08:10:24 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-patient-labrador.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 08:10:24 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui]
logger.go:42: 08:10:24 | allinone/1-curl | Checking an expected HTTP response
logger.go:42: 08:10:24 | allinone/1-curl | Running in OpenShift
logger.go:42: 08:10:24 | allinone/1-curl | User not provided. Getting the token...
logger.go:42: 08:10:26 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 08:10:32 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-patient-labrador.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:10:32 | allinone/1-curl | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 08:10:32 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-patient-labrador.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:10:33 | allinone/1-curl | HTTP response is 503. 200 expected. Waiting 10 s
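
The 'Ignoring ensure-ingress-host.sh …' line above is kuttl's file-discovery rule at work: inside a test-case directory, only files matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ become numbered steps, so helper scripts and loose manifests such as elasticsearch_0.yml are skipped rather than executed. A rough POSIX translation of the same check (kuttl itself uses Go's regexp package, which is where \d and the non-capturing groups come from):

    for f in 01-install.yaml 04-test-ui-config.yaml README.md ensure-ingress-host.sh elasticsearch_0.yml; do
      if echo "$f" | grep -Eq '^[0-9]+-[^.]+(\.yaml)?$'; then
        echo "step:    $f"
      else
        echo "ignored: $f"
      fi
    done
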
logger.go:42: 08:10:43 | allinone/1-curl | Try number 3/30 the https://all-in-one-ui-kuttl-test-patient-labrador.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:10:43 | allinone/1-curl | curl response asserted properly
logger.go:42: 08:10:43 | allinone/1-curl | test step completed 1-curl
logger.go:42: 08:10:43 | allinone/2-delete | starting test step 2-delete
logger.go:42: 08:10:44 | allinone/2-delete | Jaeger:kuttl-test-patient-labrador/all-in-one-ui created
logger.go:42: 08:10:44 | allinone/2-delete | test step completed 2-delete
logger.go:42: 08:10:44 | allinone/3-install | starting test step 3-install
logger.go:42: 08:10:44 | allinone/3-install | Jaeger:kuttl-test-patient-labrador/all-in-one-ui updated
logger.go:42: 08:10:44 | allinone/3-install | test step completed 3-install
logger.go:42: 08:10:44 | allinone/4-test-ui-config | starting test step 4-test-ui-config
logger.go:42: 08:10:44 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:10:44 | allinone/4-test-ui-config | Checking the Ingress host value was populated
logger.go:42: 08:10:44 | allinone/4-test-ui-config | Try number 0
logger.go:42: 08:10:44 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template:
logger.go:42: 08:10:44 | allinone/4-test-ui-config | template was:
logger.go:42: 08:10:44 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host}
logger.go:42: 08:10:44 | allinone/4-test-ui-config | object given to jsonpath engine was:
logger.go:42: 08:10:44 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}}
logger.go:42: 08:10:44 | allinone/4-test-ui-config |
logger.go:42: 08:10:44 | allinone/4-test-ui-config |
logger.go:42: 08:10:54 | allinone/4-test-ui-config | Try number 1
logger.go:42: 08:10:54 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-patient-labrador.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 08:10:54 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 08:10:55 | allinone/4-test-ui-config | time="2023-12-04T08:10:55Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-patient-labrador.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 08:10:55 | allinone/4-test-ui-config | time="2023-12-04T08:10:55Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 08:10:55 | allinone/4-test-ui-config | time="2023-12-04T08:10:55Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-patient-labrador.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search"
logger.go:42: 08:10:55 | allinone/4-test-ui-config | time="2023-12-04T08:10:55Z" level=info msg="Doing request number 0"
logger.go:42: 08:10:55 | allinone/4-test-ui-config | time="2023-12-04T08:10:55Z" level=info msg="Content found and asserted!"
logger.go:42: 08:10:55 | allinone/4-test-ui-config | time="2023-12-04T08:10:55Z" level=info msg="Success!"
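
The 'array index out of bounds' error at Try number 0 above is transient rather than a test bug: step 3 had just replaced the Jaeger instance, the Route was still being recreated, and an empty List has no .items[0] for jsonpath to index. ensure-ingress-host.sh papers over this by retrying until the expression resolves; the loop is roughly shaped like this (retry count and sleep interval are illustrative):

    # Poll until the first Route in $NAMESPACE reports an admitted ingress host.
    for try in $(seq 0 29); do
      echo "Try number $try"
      host=$(kubectl get routes -n "$NAMESPACE" \
        -o jsonpath='{.items[0].status.ingress[0].host}' 2>/dev/null) && [ -n "$host" ] && break
      sleep 10
    done
    echo "Hostname is $host"
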
logger.go:42: 08:10:55 | allinone/4-test-ui-config | test step completed 4-test-ui-config logger.go:42: 08:10:55 | allinone | allinone events from ns kuttl-test-patient-labrador: logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:23 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm Binding Scheduled Successfully assigned kuttl-test-patient-labrador/all-in-one-ui-cd4f66f55-hgwgm to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:23 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-cd4f66f55 SuccessfulCreate Created pod: all-in-one-ui-cd4f66f55-hgwgm replicaset-controller logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:23 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-cd4f66f55 to 1 deployment-controller logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:24 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm AddedInterface Add eth0 [10.128.2.120/23] from ovn-kubernetes logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:24 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:24 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:24 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:24 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:24 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:24 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:28 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:28 +0000 UTC Normal Pod all-in-one-ui-cd4f66f55-hgwgm.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:28 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-cd4f66f55 SuccessfulDelete Deleted pod: all-in-one-ui-cd4f66f55-hgwgm replicaset-controller logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:28 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-cd4f66f55 to 0 from 1 deployment-controller logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl Binding Scheduled Successfully assigned kuttl-test-patient-labrador/all-in-one-ui-77c6df46b5-brmjl to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl AddedInterface Add eth0 [10.128.2.121/23] from ovn-kubernetes logger.go:42: 
08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-77c6df46b5 SuccessfulCreate Created pod: all-in-one-ui-77c6df46b5-brmjl replicaset-controller logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:29 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-77c6df46b5 to 1 deployment-controller logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:44 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:44 +0000 UTC Normal Pod all-in-one-ui-77c6df46b5-brmjl.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:50 +0000 UTC Normal Pod all-in-one-ui-85b495847f-ljhjn Binding Scheduled Successfully assigned kuttl-test-patient-labrador/all-in-one-ui-85b495847f-ljhjn to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:50 +0000 UTC Normal Pod all-in-one-ui-85b495847f-ljhjn AddedInterface Add eth0 [10.128.2.122/23] from ovn-kubernetes logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:50 +0000 UTC Normal Pod all-in-one-ui-85b495847f-ljhjn.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:e724963dd365f319fdfaaa6159b16227b5744d8a6700974bdd9dfe4ddf40a580" already present on machine kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:50 +0000 UTC Normal Pod all-in-one-ui-85b495847f-ljhjn.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:50 +0000 UTC Normal Pod all-in-one-ui-85b495847f-ljhjn.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:50 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-85b495847f SuccessfulCreate Created pod: all-in-one-ui-85b495847f-ljhjn replicaset-controller logger.go:42: 08:10:55 | allinone | 2023-12-04 08:10:50 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-85b495847f to 1 
deployment-controller logger.go:42: 08:10:55 | allinone | Deleting namespace: kuttl-test-patient-labrador === CONT kuttl/harness/production logger.go:42: 08:11:01 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:11:01 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 08:11:01 | production | Creating namespace: kuttl-test-magnetic-falcon logger.go:42: 08:11:02 | production/1-install | starting test step 1-install logger.go:42: 08:11:02 | production/1-install | Jaeger:kuttl-test-magnetic-falcon/production-ui created logger.go:42: 08:11:39 | production/1-install | test step completed 1-install logger.go:42: 08:11:39 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access logger.go:42: 08:11:39 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh] logger.go:42: 08:11:39 | production/2-check-forbbiden-access | Checking the Ingress host value was populated logger.go:42: 08:11:39 | production/2-check-forbbiden-access | Try number 0 logger.go:42: 08:11:39 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com logger.go:42: 08:11:39 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui] logger.go:42: 08:11:39 | production/2-check-forbbiden-access | Checking an expected HTTP response logger.go:42: 08:11:39 | production/2-check-forbbiden-access | Running in OpenShift logger.go:42: 08:11:39 | production/2-check-forbbiden-access | Not using any secret logger.go:42: 08:11:39 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:11:39 | production/2-check-forbbiden-access | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 08:11:39 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:11:39 | production/2-check-forbbiden-access | HTTP response is 503. 403 expected. 
Waiting 10 s logger.go:42: 08:11:49 | production/2-check-forbbiden-access | Try number 3/30 the https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:11:49 | production/2-check-forbbiden-access | curl response asserted properly logger.go:42: 08:11:49 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access logger.go:42: 08:11:49 | production/3-curl | starting test step 3-curl logger.go:42: 08:11:49 | production/3-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 08:11:49 | production/3-curl | Checking the Ingress host value was populated logger.go:42: 08:11:49 | production/3-curl | Try number 0 logger.go:42: 08:11:49 | production/3-curl | Hostname is production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com logger.go:42: 08:11:49 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 08:11:50 | production/3-curl | Checking an expected HTTP response logger.go:42: 08:11:50 | production/3-curl | Running in OpenShift logger.go:42: 08:11:50 | production/3-curl | User not provided. Getting the token... logger.go:42: 08:11:51 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 08:11:58 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search logger.go:42: 08:11:58 | production/3-curl | Something failed while trying to contact the server. 
Trying insecure mode
logger.go:42: 08:11:58 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:11:58 | production/3-curl | curl response asserted properly
logger.go:42: 08:11:58 | production/3-curl | test step completed 3-curl
logger.go:42: 08:11:58 | production/4-install | starting test step 4-install
logger.go:42: 08:11:58 | production/4-install | Jaeger:kuttl-test-magnetic-falcon/production-ui updated
logger.go:42: 08:11:58 | production/4-install | test step completed 4-install
logger.go:42: 08:11:58 | production/5-check-disabled-security | starting test step 5-check-disabled-security
logger.go:42: 08:11:58 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:11:58 | production/5-check-disabled-security | Checking the Ingress host value was populated
logger.go:42: 08:11:58 | production/5-check-disabled-security | Try number 0
logger.go:42: 08:11:58 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 08:11:58 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui]
logger.go:42: 08:11:58 | production/5-check-disabled-security | Checking an expected HTTP response
logger.go:42: 08:11:58 | production/5-check-disabled-security | Running in OpenShift
logger.go:42: 08:11:58 | production/5-check-disabled-security | Not using any secret
logger.go:42: 08:11:58 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:11:58 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 08:11:58 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:11:58 | production/5-check-disabled-security | HTTP response is 403. 200 expected. Waiting 10 s
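
All of the curl-based steps delegate to assert-jaeger-http-code.sh, which polls until the route answers with the expected status code; the 'HTTP response is 403. 200 expected. Waiting 10 s' entry above is one iteration of that loop (step 4 had just disabled security on the Jaeger CR, so the oauth-proxy 403 lingers until the redeployed query pod takes over). Stripped of its token and insecure-mode fallbacks, the loop is essentially the following sketch (variable names illustrative):

    EXPECTED_CODE=200
    for try in $(seq 1 30); do
      echo "Try number $try/30 the $URL"
      code=$(curl -sk -o /dev/null -w '%{http_code}' "$URL")
      if [ "$code" = "$EXPECTED_CODE" ]; then
        echo "curl response asserted properly"
        break
      fi
      echo "HTTP response is $code. $EXPECTED_CODE expected. Waiting 10 s"
      sleep 10
    done
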
logger.go:42: 08:12:08 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search
logger.go:42: 08:12:08 | production/5-check-disabled-security | curl response asserted properly
logger.go:42: 08:12:08 | production/5-check-disabled-security | test step completed 5-check-disabled-security
logger.go:42: 08:12:08 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID
logger.go:42: 08:12:08 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh]
logger.go:42: 08:12:08 | production/6-check-NO-gaID | Checking the Ingress host value was populated
logger.go:42: 08:12:08 | production/6-check-NO-gaID | Try number 0
logger.go:42: 08:12:09 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com
logger.go:42: 08:12:09 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 08:12:09 | production/6-check-NO-gaID | time="2023-12-04T08:12:09Z" level=info msg="Querying https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 08:12:09 | production/6-check-NO-gaID | time="2023-12-04T08:12:09Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 08:12:09 | production/6-check-NO-gaID | time="2023-12-04T08:12:09Z" level=info msg="Polling to https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search"
logger.go:42: 08:12:09 | production/6-check-NO-gaID | time="2023-12-04T08:12:09Z" level=info msg="Doing request number 0"
logger.go:42: 08:12:09 | production/6-check-NO-gaID | time="2023-12-04T08:12:09Z" level=info msg="Content not found and asserted it was not found!"
logger.go:42: 08:12:09 | production/6-check-NO-gaID | time="2023-12-04T08:12:09Z" level=info msg="Success!"
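
Steps 4-test-ui-config, 6-check-NO-gaID and 8-check-gaID all run the same Go helper (cmd-utils/uiconfig/main.go) with two knobs: EXPECTED_CONTENT, the string to look for in the served UI page, and ASSERT_PRESENT, which decides whether finding it counts as success. A one-shot approximation of the check it polls for (the helper adds retries and an optional Authorization header):

    URL="https://$(kubectl get routes -n "$NAMESPACE" -o jsonpath='{.items[0].status.ingress[0].host}')/search"
    if curl -sk "$URL" | grep -q 'MyTrackingId'; then
      echo "content present"   # success when ASSERT_PRESENT=true
    else
      echo "content absent"    # success when ASSERT_PRESENT=false
    fi
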
logger.go:42: 08:12:09 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID logger.go:42: 08:12:09 | production/7-add-tracking-id | starting test step 7-add-tracking-id logger.go:42: 08:12:09 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE] logger.go:42: 08:12:10 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured logger.go:42: 08:12:10 | production/7-add-tracking-id | test step completed 7-add-tracking-id logger.go:42: 08:12:10 | production/8-check-gaID | starting test step 8-check-gaID logger.go:42: 08:12:10 | production/8-check-gaID | running command: [./ensure-ingress-host.sh] logger.go:42: 08:12:10 | production/8-check-gaID | Checking the Ingress host value was populated logger.go:42: 08:12:10 | production/8-check-gaID | Try number 0 logger.go:42: 08:12:10 | production/8-check-gaID | Hostname is production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com logger.go:42: 08:12:10 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 08:12:10 | production/8-check-gaID | time="2023-12-04T08:12:10Z" level=info msg="Querying https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search..." logger.go:42: 08:12:10 | production/8-check-gaID | time="2023-12-04T08:12:10Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 08:12:10 | production/8-check-gaID | time="2023-12-04T08:12:10Z" level=info msg="Polling to https://production-ui-kuttl-test-magnetic-falcon.apps.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com/search" logger.go:42: 08:12:10 | production/8-check-gaID | time="2023-12-04T08:12:10Z" level=info msg="Doing request number 0" logger.go:42: 08:12:10 | production/8-check-gaID | time="2023-12-04T08:12:10Z" level=warning msg="Found: false . Assert: true" logger.go:42: 08:12:10 | production/8-check-gaID | time="2023-12-04T08:12:10Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 08:12:10 | production/8-check-gaID | time="2023-12-04T08:12:10Z" level=info msg="Doing request number 1" logger.go:42: 08:12:10 | production/8-check-gaID | time="2023-12-04T08:12:10Z" level=warning msg="Found: false . Assert: true" logger.go:42: 08:12:10 | production/8-check-gaID | time="2023-12-04T08:12:10Z" level=warning msg="The condition of the test function was not accomplished" logger.go:42: 08:12:18 | production/8-check-gaID | time="2023-12-04T08:12:18Z" level=info msg="Doing request number 2" logger.go:42: 08:12:18 | production/8-check-gaID | time="2023-12-04T08:12:18Z" level=info msg="Content found and asserted!" logger.go:42: 08:12:18 | production/8-check-gaID | time="2023-12-04T08:12:18Z" level=info msg="Success!" 
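
Step 7 applies add-tracking-id.yaml and step 8 then polls until the tracking ID is actually served; the two 'Found: false' attempts above are just the query deployment still rolling out the new UI configuration (visible in the events below). The applied manifest plausibly looks like the following sketch, with the gaID field following the Jaeger UI's tracking options and the values taken from the log (this is not the repository's file):

    kubectl apply -n "$NAMESPACE" -f - <<'EOF'
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: production-ui
    spec:
      ui:
        options:
          tracking:
            gaID: MyTrackingId
    EOF
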
logger.go:42: 08:12:18 | production/8-check-gaID | test step completed 8-check-gaID logger.go:42: 08:12:18 | production | production events from ns kuttl-test-magnetic-falcon: logger.go:42: 08:12:18 | production | 2023-12-04 08:11:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj Binding Scheduled Successfully assigned kuttl-test-magnetic-falcon/elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj to ip-10-0-97-5.us-east-2.compute.internal default-scheduler logger.go:42: 08:12:18 | production | 2023-12-04 08:11:08 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj FailedMount MountVolume.SetUp failed for volume "elasticsearch-metrics" : secret "elasticsearch-metrics" not found kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:08 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-7787b6fd98 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj replicaset-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:08 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmagneticfalconproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-7787b6fd98 to 1 deployment-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes logger.go:42: 08:12:18 | production | 2023-12-04 08:11:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:56ea62bfb0ca36e19a7b21aff3676e49511f05f72da5e76d6427fd8240f328a8" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:79427bea6b5c37894b9782c3821d8b9074838e606daa4a743b2ae060856fa98a" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:19 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:24 +0000 UTC Warning Pod 
elasticsearch-cdm-kuttltestmagneticfalconproductionui-1-77222qj.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-xfdhm Binding Scheduled Successfully assigned kuttl-test-magnetic-falcon/production-ui-collector-5b7c4bd9bb-xfdhm to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Warning Pod production-ui-collector-5b7c4bd9bb-xfdhm FailedMount MountVolume.SetUp failed for volume "production-ui-collector-tls-config-volume" : secret "production-ui-collector-headless-tls" not found kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal ReplicaSet.apps production-ui-collector-5b7c4bd9bb SuccessfulCreate Created pod: production-ui-collector-5b7c4bd9bb-xfdhm replicaset-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-5b7c4bd9bb to 1 deployment-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt Binding Scheduled Successfully assigned kuttl-test-magnetic-falcon/production-ui-query-7696f5b79f-2vdbt to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt AddedInterface Add eth0 [10.128.2.124/23] from ovn-kubernetes logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal ReplicaSet.apps production-ui-query-7696f5b79f SuccessfulCreate Created pod: production-ui-query-7696f5b79f-2vdbt replicaset-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:35 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-7696f5b79f to 1 deployment-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:36 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-xfdhm AddedInterface Add eth0 [10.128.2.123/23] from ovn-kubernetes logger.go:42: 08:12:18 | production | 2023-12-04 08:11:36 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-xfdhm.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:c12b97adae3dabdbb374df36766adf977dc61193def990d90c751c445d89d856" already present 
on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:36 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-xfdhm.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:36 +0000 UTC Normal Pod production-ui-collector-5b7c4bd9bb-xfdhm.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:36 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:36 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:36 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:36 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:36 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:12:18 | production | 2023-12-04 08:11:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:12:18 | production | 2023-12-04 08:11:50 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 08:12:18 | production | 2023-12-04 08:11:53 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2 Binding Scheduled Successfully assigned kuttl-test-magnetic-falcon/production-ui-query-5f97897c77-n72g2 to ip-10-0-72-35.us-east-2.compute.internal default-scheduler logger.go:42: 08:12:18 | production | 2023-12-04 08:11:53 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2 AddedInterface Add eth0 [10.128.2.125/23] from ovn-kubernetes logger.go:42: 08:12:18 | production | 2023-12-04 08:11:53 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:53 +0000 UTC Normal ReplicaSet.apps production-ui-query-5f97897c77 SuccessfulCreate Created pod: production-ui-query-5f97897c77-n72g2 replicaset-controller logger.go:42: 08:12:18 | 
production | 2023-12-04 08:11:53 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:53 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:53 +0000 UTC Normal Pod production-ui-query-7696f5b79f-2vdbt.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:53 +0000 UTC Normal ReplicaSet.apps production-ui-query-7696f5b79f SuccessfulDelete Deleted pod: production-ui-query-7696f5b79f-2vdbt replicaset-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:53 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-7696f5b79f to 0 from 1 deployment-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:53 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-5f97897c77 to 1 deployment-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:54 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:54 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:54 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{oauth-proxy} Pulled Container image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:2b863e1a163efa3754142f94f3e62a63568e90050f3b1bec039189afb423a04b" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:54 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:54 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:54 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:54 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:54 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:59 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:59 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:59 +0000 UTC Normal Pod production-ui-query-5f97897c77-n72g2.spec.containers{oauth-proxy} Killing Stopping container 
oauth-proxy kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:11:59 +0000 UTC Normal ReplicaSet.apps production-ui-query-5f97897c77 SuccessfulDelete Deleted pod: production-ui-query-5f97897c77-n72g2 replicaset-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:11:59 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-5f97897c77 to 0 from 1 deployment-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:12:00 +0000 UTC Normal Pod production-ui-query-5df98f5569-z55m2 Binding Scheduled Successfully assigned kuttl-test-magnetic-falcon/production-ui-query-5df98f5569-z55m2 to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 08:12:18 | production | 2023-12-04 08:12:00 +0000 UTC Normal ReplicaSet.apps production-ui-query-5df98f5569 SuccessfulCreate Created pod: production-ui-query-5df98f5569-z55m2 replicaset-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:12:00 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-5df98f5569 to 1 deployment-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:12:01 +0000 UTC Normal Pod production-ui-query-5df98f5569-z55m2 AddedInterface Add eth0 [10.131.0.88/23] from ovn-kubernetes logger.go:42: 08:12:18 | production | 2023-12-04 08:12:01 +0000 UTC Normal Pod production-ui-query-5df98f5569-z55m2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:01 +0000 UTC Normal Pod production-ui-query-5df98f5569-z55m2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:01 +0000 UTC Normal Pod production-ui-query-5df98f5569-z55m2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:01 +0000 UTC Normal Pod production-ui-query-5df98f5569-z55m2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:01 +0000 UTC Normal Pod production-ui-query-5df98f5569-z55m2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:01 +0000 UTC Normal Pod production-ui-query-5df98f5569-z55m2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:11 +0000 UTC Normal Pod production-ui-query-59dc87c476-27qrt Binding Scheduled Successfully assigned kuttl-test-magnetic-falcon/production-ui-query-59dc87c476-27qrt to ip-10-0-4-161.us-east-2.compute.internal default-scheduler logger.go:42: 08:12:18 | production | 2023-12-04 08:12:11 +0000 UTC Normal ReplicaSet.apps production-ui-query-59dc87c476 SuccessfulCreate Created pod: production-ui-query-59dc87c476-27qrt replicaset-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:12:11 +0000 UTC Normal Pod production-ui-query-5df98f5569-z55m2.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:11 +0000 UTC Normal Pod 
production-ui-query-5df98f5569-z55m2.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:11 +0000 UTC Normal ReplicaSet.apps production-ui-query-5df98f5569 SuccessfulDelete Deleted pod: production-ui-query-5df98f5569-z55m2 replicaset-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:12:11 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-5df98f5569 to 0 from 1 deployment-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:12:11 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-59dc87c476 to 1 deployment-controller logger.go:42: 08:12:18 | production | 2023-12-04 08:12:12 +0000 UTC Normal Pod production-ui-query-59dc87c476-27qrt AddedInterface Add eth0 [10.131.0.89/23] from ovn-kubernetes logger.go:42: 08:12:18 | production | 2023-12-04 08:12:12 +0000 UTC Normal Pod production-ui-query-59dc87c476-27qrt.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a07706a058469605ebbfaef01660e2b77ad064721615a2e37bb5c7c6aad1156f" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:12 +0000 UTC Normal Pod production-ui-query-59dc87c476-27qrt.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:12 +0000 UTC Normal Pod production-ui-query-59dc87c476-27qrt.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:12 +0000 UTC Normal Pod production-ui-query-59dc87c476-27qrt.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:b99b1666405ba55c81bdc1f4cb385228fd513f94f7b74881b78e3cb7f63da937" already present on machine kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:12 +0000 UTC Normal Pod production-ui-query-59dc87c476-27qrt.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 08:12:18 | production | 2023-12-04 08:12:12 +0000 UTC Normal Pod production-ui-query-59dc87c476-27qrt.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 08:12:18 | production | Deleting namespace: kuttl-test-magnetic-falcon === CONT kuttl/harness/artifacts logger.go:42: 08:12:25 | artifacts | Creating namespace: kuttl-test-rare-chipmunk logger.go:42: 08:12:26 | artifacts | artifacts events from ns kuttl-test-rare-chipmunk: logger.go:42: 08:12:26 | artifacts | Deleting namespace: kuttl-test-rare-chipmunk === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (132.06s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/allinone (41.78s) --- PASS: kuttl/harness/production (83.99s) --- PASS: kuttl/harness/artifacts (6.13s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml time="2023-12-04T08:12:32Z" level=debug msg="Setting a new name for the test suites" time="2023-12-04T08:12:32Z" level=debug msg="Removing 'artifacts' TestCase" time="2023-12-04T08:12:32Z" level=debug msg="normalizing test case names" time="2023-12-04T08:12:32Z" level=debug msg="ui/allinone -> ui_allinone" 
time="2023-12-04T08:12:32Z" level=debug msg="ui/production -> ui_production" time="2023-12-04T08:12:32Z" level=debug msg="ui/artifacts -> ui_artifacts" +---------------+--------+ | NAME | RESULT | +---------------+--------+ | ui_allinone | passed | | ui_production | passed | | ui_artifacts | passed | +---------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true + '[' 3 -ne 3 ']' + test_suite_name=upgrade + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/upgrade.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-upgrade make[2]: Entering directory '/tmp/jaeger-tests' make docker JAEGER_VERSION=1.51.1 IMG="quay.io//jaeger-operator:next" make[3]: Entering directory '/tmp/jaeger-tests' [ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.51.0" --build-arg=JAEGER_VERSION=1.51.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2023-12-04T08:12:32Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" . make[3]: Leaving directory '/tmp/jaeger-tests' touch build-e2e-upgrade-image SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.51.0" JAEGER_OPERATOR_VERSION="1.51.0" JAEGER_VERSION="1.51.0" ./tests/e2e/upgrade/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.15.0-0.nightly-2023-12-02-123536 True False 94m Cluster version is 4.15.0-0.nightly-2023-12-02-123536' ++ IS_OPENSHIFT=false ++ '[' '!' 
+++ dirname ./tests/e2e/upgrade/render.sh
++ export SUITE_DIR=./tests/e2e/upgrade
++ SUITE_DIR=./tests/e2e/upgrade
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/upgrade
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
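kuttl-test.yaml comes out of gomplate, which expands the environment variables exported earlier in the trace into the template. A self-contained toy showing just that mechanism (the demo file names are invented; the real input is templates/kuttl-test.yaml.template):

    printf 'nodes: {{ env.Getenv "ELASTICSEARCH_NODECOUNT" }}\n' > demo.yaml.template
    ELASTICSEARCH_NODECOUNT=1 gomplate -f demo.yaml.template -o demo.yaml
    cat demo.yaml   # -> nodes: 1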
+ export JAEGER_NAME
+ '[' true = true ']'
+ skip_test upgrade 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade
+ warning 'upgrade: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m'
WAR: upgrade: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test upgrade-from-latest-release 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=upgrade-from-latest-release
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build
+ '[' _build '!=' _build ']'
+ rm -rf upgrade-from-latest-release
+ warning 'upgrade-from-latest-release: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m'
WAR: upgrade-from-latest-release: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running upgrade E2E tests'
Running upgrade E2E tests
+ cd tests/e2e/upgrade/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-3107041029
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://api.ci-op-8sv8qw3g-5054a.cspilp.interop.ccitredhat.com:6443
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 600 seconds for each step
    harness.go:372: testsuite: . has 1 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT  kuttl/harness/artifacts
logger.go:42: 08:12:33 | artifacts | Creating namespace: kuttl-test-natural-manatee
logger.go:42: 08:12:33 | artifacts | artifacts events from ns kuttl-test-natural-manatee:
logger.go:42: 08:12:33 | artifacts | Deleting namespace: kuttl-test-natural-manatee
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (6.31s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.13s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2023-12-04T08:12:39Z" level=debug msg="Setting a new name for the test suites"
time="2023-12-04T08:12:39Z" level=debug msg="Removing 'artifacts' TestCase"
time="2023-12-04T08:12:39Z" level=debug msg="normalizing test case names"
time="2023-12-04T08:12:39Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
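Because IS_OPENSHIFT=true, both upgrade tests were removed before kuttl ran, which is why only the synthetic artifacts test executed and the suite still reports PASS. Reconstructed from the trace above, skip_test amounts to deleting the rendered test folder and emitting a warning; a sketch only (the guard's false branch never runs in this log, so it is omitted here):

    skip_test() {
      test_name=$1
      message=$2
      # the trace compares "$(basename "$(pwd)")" with _build; the mismatch branch is not exercised in this run
      rm -rf "$test_name"
      echo -e "\e[1;33mWAR: $test_name: $message\e[0m"
    }
    skip_test upgrade 'Test not supported in OpenShift'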