Installing kuttl
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64
KUBECONFIG file is: /tmp/kubeconfig-1113221797
for suite in miscellaneous elasticsearch examples generate upgrade sidecar streaming ui; do \
make run-e2e-tests-$suite ; \
done
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=miscellaneous
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/miscellaneous.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-miscellaneous
make[2]: Entering directory '/tmp/jaeger-tests'
SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.18.0-0.nightly-2024-11-30-141716 True False 11m Cluster version is 4.18.0-0.nightly-2024-11-30-141716'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.18.0-0.nightly-2024-11-30-141716 True False 11m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/miscellaneous/render.sh
++ export SUITE_DIR=./tests/e2e/miscellaneous
++ SUITE_DIR=./tests/e2e/miscellaneous
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ start_test collector-autoscale
+ '[' 1 -ne 1 ']'
+ test_name=collector-autoscale
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-autoscale'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m'
Rendering files for test collector-autoscale
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build
+ '[' _build '!=' _build ']'
+ mkdir -p collector-autoscale
+ cd collector-autoscale
+ jaeger_name=simple-prod
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ ELASTICSEARCH_NODECOUNT=1
+ render_install_jaeger simple-prod production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml
+ version_lt 1.30 1.23
++ echo 1.30 1.23
++ tr ' ' '\n'
++ sort -rV
++ head -n 1
+ test 1.30 '!=' 1.30
+ rm ./03-assert.yaml
+ generate_otlp_e2e_tests http
+ test_protocol=http
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ start_test collector-otlp-allinone-http
+ '[' 1 -ne 1 ']'
+ test_name=collector-otlp-allinone-http
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-otlp-allinone-http'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m'
Rendering files for test collector-otlp-allinone-http
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale
+ '[' collector-autoscale '!=' _build ']'
+ cd ..
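The version_le and version_lt expansions traced above are plain sort -V comparisons. A minimal sketch of those helpers, reconstructed from the expansions (the real definitions live in the suite's shell utilities, so exact names and bodies are assumptions):

    # version_le A B: true when A <= B (the smaller of the pair sorts first under sort -V)
    version_le() {
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }
    # version_lt A B: true when A < B (the larger of the pair sorts first under sort -rV)
    version_lt() {
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" != "$1"
    }

Both calls in the trace return false, which is consistent with the branches taken: KAFKA_USE_CUSTOM_PODSET=true is assigned, and ./03-assert.yaml is removed.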
+ mkdir -p collector-otlp-allinone-http
+ cd collector-otlp-allinone-http
+ render_install_jaeger my-jaeger allInOne 00
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=allInOne
+ test_step=00
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_otlp_smoke_test my-jaeger http true 01
+ '[' 4 -ne 4 ']'
+ jaeger=my-jaeger
+ reporting_protocol=http
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template
+ '[' http = grpc ']'
+ reporting_port=:4318
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318
+ OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ REPORTING_PROTOCOL=http
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset OTEL_EXPORTER_OTLP_ENDPOINT
+ start_test collector-otlp-production-http
+ '[' 1 -ne 1 ']'
+ test_name=collector-otlp-production-http
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-otlp-production-http'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m'
Rendering files for test collector-otlp-production-http
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http
+ '[' collector-otlp-allinone-http '!=' _build ']'
+ cd ..
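render_otlp_smoke_test drives the whole smoke-test render through environment variables that the gomplate template consumes. A condensed sketch of the endpoint selection it performs, using the variable names visible in the trace (only the secured OpenShift path is exercised in this run, so the unsecured http://...:16686 defaults are an assumption):

    reporting_port=":4318"                                        # OTLP over HTTP
    [ "$reporting_protocol" = grpc ] && reporting_port=":4317"    # OTLP over gRPC
    protocol="http://" ; query_port=":16686"                      # assumed unsecured defaults
    if [ "$is_secured" = true ]; then protocol="https://" ; query_port=":443" ; fi
    export JAEGER_QUERY_ENDPOINT="${protocol}${jaeger}-query${query_port}"
    export OTEL_EXPORTER_OTLP_ENDPOINT="http://${jaeger}-collector-headless${reporting_port}"

With those exported, the two gomplate calls materialize the numbered kuttl step files (01-smoke-test.yaml, 01-assert.yaml) for the test directory.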
+ mkdir -p collector-otlp-production-http
+ cd collector-otlp-production-http
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_otlp_smoke_test my-jaeger http true 02
+ '[' 4 -ne 4 ']'
+ jaeger=my-jaeger
+ reporting_protocol=http
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template
+ '[' http = grpc ']'
+ reporting_port=:4318
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318
+ OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ REPORTING_PROTOCOL=http
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset OTEL_EXPORTER_OTLP_ENDPOINT
+ generate_otlp_e2e_tests grpc
+ test_protocol=grpc
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ start_test collector-otlp-allinone-grpc
+ '[' 1 -ne 1 ']'
+ test_name=collector-otlp-allinone-grpc
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-otlp-allinone-grpc'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m'
Rendering files for test collector-otlp-allinone-grpc
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http
+ '[' collector-otlp-production-http '!=' _build ']'
+ cd ..
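render_install_jaeger is called throughout this render with the same shape: a Jaeger name, a deploy mode, and a step number. The chain of [ "$deploy_mode" = ... ] tests in the trace suggests a dispatch roughly like the sketch below (a reconstruction, not the script itself; the production and production_cassandra branches seen in the tests are omitted, and TEMPLATES is an assumed shorthand for /tmp/jaeger-tests/tests/templates):

    render_install_jaeger() {
      export JAEGER_NAME=$1 ; deploy_mode=$2 ; test_step=$3
      case "$deploy_mode" in
        allInOne)
          # unsecured single-binary deployment
          gomplate -f "$TEMPLATES/allinone-jaeger-install.yaml.template" -o "./${test_step}-install.yaml"
          gomplate -f "$TEMPLATES/allinone-jaeger-assert.yaml.template" -o "./${test_step}-assert.yaml"
          ;;
        production_autoprovisioned)
          # production deployment with an operator-provisioned Elasticsearch (OpenShift)
          gomplate -f "$TEMPLATES/openshift/production-jaeger-autoprovisioned-install.yaml.template" -o "./${test_step}-install.yaml"
          gomplate -f "$TEMPLATES/production-jaeger-assert.yaml.template" -o "./${test_step}-assert.yaml"
          ;;
      esac
    }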
+ mkdir -p collector-otlp-allinone-grpc
+ cd collector-otlp-allinone-grpc
+ render_install_jaeger my-jaeger allInOne 00
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=allInOne
+ test_step=00
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_otlp_smoke_test my-jaeger grpc true 01
+ '[' 4 -ne 4 ']'
+ jaeger=my-jaeger
+ reporting_protocol=grpc
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template
+ '[' grpc = grpc ']'
+ reporting_port=:4317
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317
+ OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ REPORTING_PROTOCOL=grpc
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset OTEL_EXPORTER_OTLP_ENDPOINT
+ start_test collector-otlp-production-grpc
+ '[' 1 -ne 1 ']'
+ test_name=collector-otlp-production-grpc
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-otlp-production-grpc'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m'
Rendering files for test collector-otlp-production-grpc
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc
+ '[' collector-otlp-allinone-grpc '!=' _build ']'
+ cd ..
+ mkdir -p collector-otlp-production-grpc
+ cd collector-otlp-production-grpc
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_otlp_smoke_test my-jaeger grpc true 02
+ '[' 4 -ne 4 ']'
+ jaeger=my-jaeger
+ reporting_protocol=grpc
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template
+ '[' grpc = grpc ']'
+ reporting_port=:4317
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317
+ OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ REPORTING_PROTOCOL=grpc
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset OTEL_EXPORTER_OTLP_ENDPOINT
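Two tests that do not apply to OpenShift (istio and outside-cluster) are pruned next via skip_test, expanded below. Reconstructed from those expansions, the helper pair looks roughly like this (names taken from the trace, bodies assumed):

    warning() { echo -e "\e[1;33mWAR: $1\e[0m"; }
    skip_test() {
      local test_name=$1 message=$2
      # step back to the _build root before deleting the rendered test directory
      [ "$(basename "$(pwd)")" != _build ] && cd ..
      rm -rf "$test_name"
      warning "$test_name: $message"
    }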
+ '[' true = true ']'
+ skip_test istio 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=istio
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc
+ '[' collector-otlp-production-grpc '!=' _build ']'
+ cd ..
+ rm -rf istio
+ warning 'istio: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m'
WAR: istio: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test outside-cluster 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=outside-cluster
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build
+ '[' _build '!=' _build ']'
+ rm -rf outside-cluster
+ warning 'outside-cluster: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m'
WAR: outside-cluster: Test not supported in OpenShift
+ start_test set-custom-img
+ '[' 1 -ne 1 ']'
+ test_name=set-custom-img
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test set-custom-img'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test set-custom-img\e[0m'
Rendering files for test set-custom-img
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build
+ '[' _build '!=' _build ']'
+ mkdir -p set-custom-img
+ cd set-custom-img
+ jaeger_name=my-jaeger
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ cp ./01-install.yaml ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml
+ '[' true = true ']'
+ skip_test non-cluster-wide 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=non-cluster-wide
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img
+ '[' set-custom-img '!=' _build ']'
+ cd ..
+ rm -rf non-cluster-wide
+ warning 'non-cluster-wide: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m'
WAR: non-cluster-wide: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running miscellaneous E2E tests'
Running miscellaneous E2E tests
+ cd tests/e2e/miscellaneous/_build
+ set +e
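The kuttl invocation below runs every rendered test left in the _build directory and writes an XML report. To rerun a single test from this suite against the same cluster, kuttl's standard --test filter can be combined with the same flags, for example:

    cd /tmp/jaeger-tests/tests/e2e/miscellaneous/_build
    KUBECONFIG=/tmp/kubeconfig-1113221797 \
      /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml --test collector-otlp-allinone-http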
+ KUBECONFIG=/tmp/kubeconfig-1113221797
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com:6443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 600 seconds for each step
harness.go:372: testsuite: . has 8 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/cassandra-spark
=== PAUSE kuttl/harness/cassandra-spark
=== RUN kuttl/harness/collector-autoscale
=== PAUSE kuttl/harness/collector-autoscale
=== RUN kuttl/harness/collector-otlp-allinone-grpc
=== PAUSE kuttl/harness/collector-otlp-allinone-grpc
=== RUN kuttl/harness/collector-otlp-allinone-http
=== PAUSE kuttl/harness/collector-otlp-allinone-http
=== RUN kuttl/harness/collector-otlp-production-grpc
=== PAUSE kuttl/harness/collector-otlp-production-grpc
=== RUN kuttl/harness/collector-otlp-production-http
=== PAUSE kuttl/harness/collector-otlp-production-http
=== RUN kuttl/harness/set-custom-img
=== PAUSE kuttl/harness/set-custom-img
=== CONT kuttl/harness/artifacts
logger.go:42: 13:20:06 | artifacts | Creating namespace: kuttl-test-allowed-husky
logger.go:42: 13:20:06 | artifacts | artifacts events from ns kuttl-test-allowed-husky:
logger.go:42: 13:20:06 | artifacts | Deleting namespace: kuttl-test-allowed-husky
=== CONT kuttl/harness/collector-otlp-allinone-http
logger.go:42: 13:20:13 | collector-otlp-allinone-http | Creating namespace: kuttl-test-sure-hound
logger.go:42: 13:20:13 | collector-otlp-allinone-http/0-install | starting test step 0-install
logger.go:42: 13:20:13 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-sure-hound/my-jaeger created
logger.go:42: 13:20:31 | collector-otlp-allinone-http/0-install | test step completed 0-install
logger.go:42: 13:20:31 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 13:20:31 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 13:20:32 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:20:41 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 13:20:42 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:20:42 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created
logger.go:42: 13:20:42 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created
logger.go:42: 13:21:07 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 13:21:07 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-sure-hound:
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:17 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc Binding Scheduled Successfully assigned kuttl-test-sure-hound/my-jaeger-79f4579c67-56mrc to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:17 +0000 UTC Normal ReplicaSet.apps my-jaeger-79f4579c67 SuccessfulCreate Created pod: my-jaeger-79f4579c67-56mrc replicaset-controller
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:17 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-79f4579c67 to 1 deployment-controller
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:18 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc AddedInterface Add eth0 [10.128.2.23/23] from ovn-kubernetes multus
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:18 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:27 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" in 9.151s (9.151s including waiting). Image size: 147411949 bytes. kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:27 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:27 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:27 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:30 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" in 2.31s (2.31s including waiting). Image size: 339954870 bytes. kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:30 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:30 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:36 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:36 +0000 UTC Normal Pod my-jaeger-79f4579c67-56mrc.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-79f4579c67 SuccessfulDelete Deleted pod: my-jaeger-79f4579c67-56mrc replicaset-controller
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:36 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-79f4579c67 to 0 from 1 deployment-controller
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:37 +0000 UTC Normal Pod my-jaeger-784f6f9758-xs2nm Binding Scheduled Successfully assigned kuttl-test-sure-hound/my-jaeger-784f6f9758-xs2nm to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:37 +0000 UTC Normal ReplicaSet.apps my-jaeger-784f6f9758 SuccessfulCreate Created pod: my-jaeger-784f6f9758-xs2nm replicaset-controller
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:37 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-784f6f9758 to 1 deployment-controller
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:38 +0000 UTC Warning Pod my-jaeger-784f6f9758-xs2nm FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : failed to sync secret cache: timed out waiting for the condition kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:38 +0000 UTC Warning Pod my-jaeger-784f6f9758-xs2nm FailedMount MountVolume.SetUp failed for volume "kube-api-access-mqdbf" : failed to sync configmap cache: timed out waiting for the condition kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:40 +0000 UTC Normal Pod my-jaeger-784f6f9758-xs2nm AddedInterface Add eth0 [10.128.2.24/23] from ovn-kubernetes multus
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:40 +0000 UTC Normal Pod my-jaeger-784f6f9758-xs2nm.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:40 +0000 UTC Normal Pod my-jaeger-784f6f9758-xs2nm.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:40 +0000 UTC Normal Pod my-jaeger-784f6f9758-xs2nm.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:40 +0000 UTC Normal Pod my-jaeger-784f6f9758-xs2nm.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:40 +0000 UTC Normal Pod my-jaeger-784f6f9758-xs2nm.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:40 +0000 UTC Normal Pod my-jaeger-784f6f9758-xs2nm.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:42 +0000 UTC Normal Pod check-span-f4dgd Binding Scheduled Successfully assigned kuttl-test-sure-hound/check-span-f4dgd to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:42 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-f4dgd job-controller
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:42 +0000 UTC Normal Pod report-span-8nn9v Binding Scheduled Successfully assigned kuttl-test-sure-hound/report-span-8nn9v to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:42 +0000 UTC Normal Pod report-span-8nn9v AddedInterface Add eth0 [10.131.0.16/23] from ovn-kubernetes multus
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:42 +0000 UTC Normal Pod report-span-8nn9v.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:42 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-8nn9v job-controller
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:43 +0000 UTC Normal Pod check-span-f4dgd AddedInterface Add eth0 [10.129.2.20/23] from ovn-kubernetes multus
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:43 +0000 UTC Normal Pod check-span-f4dgd.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:48 +0000 UTC Normal Pod check-span-f4dgd.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 5.616s (5.616s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:48 +0000 UTC Normal Pod check-span-f4dgd.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:48 +0000 UTC Normal Pod check-span-f4dgd.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:52 +0000 UTC Normal Pod report-span-8nn9v.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 9.84s (9.84s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:52 +0000 UTC Normal Pod report-span-8nn9v.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:20:52 +0000 UTC Normal Pod report-span-8nn9v.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:21:07 | collector-otlp-allinone-http | 2024-12-02 13:21:07 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:21:07 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-sure-hound
=== CONT kuttl/harness/set-custom-img
logger.go:42: 13:21:20 | set-custom-img | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:21:20 | set-custom-img | Ignoring check-collector-img.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:21:20 | set-custom-img | Creating namespace: kuttl-test-massive-calf
logger.go:42: 13:21:20 | set-custom-img/1-install | starting test step 1-install
logger.go:42: 13:21:20 | set-custom-img/1-install | Jaeger:kuttl-test-massive-calf/my-jaeger created
logger.go:42: 13:22:48 | set-custom-img/1-install | test step completed 1-install
logger.go:42: 13:22:48 | set-custom-img/2-install | starting test step 2-install
logger.go:42: 13:22:48 | set-custom-img/2-install | Jaeger:kuttl-test-massive-calf/my-jaeger updated
logger.go:42: 13:22:48 | set-custom-img/2-install | test step completed 2-install
logger.go:42: 13:22:48 | set-custom-img/3-check-image | starting test step 3-check-image
logger.go:42: 13:22:48 | set-custom-img/3-check-image | running command: [sh -c ./check-collector-img.sh]
logger.go:42: 13:22:49 | set-custom-img/3-check-image | Collector image mismatch. Expected: test. Has: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d
logger.go:42: 13:22:54 | set-custom-img/3-check-image | Collector image asserted properly!
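check-collector-img.sh itself is not shown in the log, but its output above (one mismatch, then success about 5 seconds later) is consistent with a poll of the collector Deployment's image. A hypothetical reconstruction, not the actual script:

    # Poll until the collector Deployment reports the custom image set in step 02.
    expected=test
    while true; do
      has=$(kubectl get deployment my-jaeger-collector -n "$NAMESPACE" \
            -o jsonpath='{.spec.template.spec.containers[0].image}')
      if [ "$has" = "$expected" ]; then
        echo "Collector image asserted properly!"
        break
      fi
      echo "Collector image mismatch. Expected: $expected. Has: $has"
      sleep 5
    done

Note that the test asserts only that the Deployment spec carries the custom image; the image "test" is not actually pullable, which is why the events below end in ErrImagePull.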
logger.go:42: 13:22:54 | set-custom-img/3-check-image | test step completed 3-check-image
logger.go:42: 13:22:54 | set-custom-img | set-custom-img events from ns kuttl-test-massive-calf:
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:48 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dfc SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89 replicaset-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:48 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89 Binding Scheduled Successfully assigned kuttl-test-massive-calf/elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:48 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dfc to 1 deployment-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89 AddedInterface Add eth0 [10.128.2.25/23] from ovn-kubernetes multus
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:49 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89.spec.containers{elasticsearch} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89.spec.containers{elasticsearch} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" in 6.179s (6.179s including waiting). Image size: 523762405 bytes. kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:55 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89.spec.containers{proxy} Pulling Pulling image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:58 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89.spec.containers{proxy} Pulled Successfully pulled image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" in 2.799s (2.799s including waiting). Image size: 272839959 bytes. kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:58 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:21:58 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:09 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestmassivecalfmyjaeger-1-5bbddc9dftgp89.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:25 +0000 UTC Normal Pod my-jaeger-collector-86657d9bb9-trsmn Binding Scheduled Successfully assigned kuttl-test-massive-calf/my-jaeger-collector-86657d9bb9-trsmn to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:25 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-86657d9bb9 SuccessfulCreate Created pod: my-jaeger-collector-86657d9bb9-trsmn replicaset-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:25 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-86657d9bb9 to 1 deployment-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:26 +0000 UTC Normal Pod my-jaeger-collector-86657d9bb9-trsmn AddedInterface Add eth0 [10.131.0.17/23] from ovn-kubernetes multus
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:26 +0000 UTC Normal Pod my-jaeger-collector-86657d9bb9-trsmn.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:26 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx Binding Scheduled Successfully assigned kuttl-test-massive-calf/my-jaeger-query-8685bf549c-clfvx to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:26 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx AddedInterface Add eth0 [10.129.2.21/23] from ovn-kubernetes multus
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:26 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-8685bf549c SuccessfulCreate Created pod: my-jaeger-query-8685bf549c-clfvx replicaset-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:26 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-8685bf549c to 1 deployment-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:36 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" in 9.862s (9.862s including waiting). Image size: 192936692 bytes. kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:36 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:36 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:36 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:37 +0000 UTC Normal Pod my-jaeger-collector-86657d9bb9-trsmn.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" in 11.261s (11.261s including waiting). Image size: 142489237 bytes. kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:37 +0000 UTC Normal Pod my-jaeger-collector-86657d9bb9-trsmn.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:37 +0000 UTC Normal Pod my-jaeger-collector-86657d9bb9-trsmn.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:42 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" in 5.485s (5.485s including waiting). Image size: 339954870 bytes. kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:42 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:42 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:42 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:46 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" in 3.557s (3.557s including waiting). Image size: 115697219 bytes. kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:46 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:46 +0000 UTC Normal Pod my-jaeger-query-8685bf549c-clfvx.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:52 +0000 UTC Normal Pod my-jaeger-collector-86657d9bb9-trsmn.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:52 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-86657d9bb9 SuccessfulDelete Deleted pod: my-jaeger-collector-86657d9bb9-trsmn replicaset-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:52 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-86657d9bb9 to 0 from 1 deployment-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:53 +0000 UTC Normal Pod my-jaeger-collector-75cc6c7c94-mqchq Binding Scheduled Successfully assigned kuttl-test-massive-calf/my-jaeger-collector-75cc6c7c94-mqchq to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:53 +0000 UTC Normal Pod my-jaeger-collector-75cc6c7c94-mqchq AddedInterface Add eth0 [10.131.0.18/23] from ovn-kubernetes multus
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:53 +0000 UTC Normal Pod my-jaeger-collector-75cc6c7c94-mqchq.spec.containers{jaeger-collector} Pulling Pulling image "test" kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:53 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-75cc6c7c94 SuccessfulCreate Created pod: my-jaeger-collector-75cc6c7c94-mqchq replicaset-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:53 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-75cc6c7c94 to 1 deployment-controller
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:54 +0000 UTC Warning Pod my-jaeger-collector-75cc6c7c94-mqchq.spec.containers{jaeger-collector} Failed Failed to pull image "test": initializing source docker://test:latest: reading manifest latest in docker.io/library/test: requested access to the resource is denied kubelet
logger.go:42: 13:22:54 | set-custom-img | 2024-12-02 13:22:54 +0000 UTC Warning Pod my-jaeger-collector-75cc6c7c94-mqchq.spec.containers{jaeger-collector} Failed Error: ErrImagePull kubelet
logger.go:42: 13:22:54 | set-custom-img | Deleting namespace: kuttl-test-massive-calf
=== CONT kuttl/harness/collector-otlp-production-http
logger.go:42: 13:23:01 | collector-otlp-production-http | Creating namespace: kuttl-test-enormous-asp
logger.go:42: 13:23:01 | collector-otlp-production-http/1-install | starting test step 1-install
logger.go:42: 13:23:01 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-enormous-asp/my-jaeger created
logger.go:42: 13:23:55 | collector-otlp-production-http/1-install | test step completed 1-install
logger.go:42: 13:23:55 | collector-otlp-production-http/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 13:23:55 | collector-otlp-production-http/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 13:23:57 | collector-otlp-production-http/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:24:03 | collector-otlp-production-http/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml]
logger.go:42: 13:24:04 | collector-otlp-production-http/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:24:04 | collector-otlp-production-http/2-smoke-test | job.batch/report-span created
logger.go:42: 13:24:04 | collector-otlp-production-http/2-smoke-test | job.batch/check-span created
logger.go:42: 13:24:28 | collector-otlp-production-http/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 13:24:28 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-enormous-asp:
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:25 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74b9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn replicaset-controller
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn Binding Scheduled Successfully assigned kuttl-test-enormous-asp/elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn AddedInterface Add eth0 [10.128.2.26/23] from ovn-kubernetes multus
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:25 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestenormousaspmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74b9 to 1 deployment-controller
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:41 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestenormousaspmyjaeger-1-5ffbbf74bgdlvn.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:52 +0000 UTC Normal Pod my-jaeger-collector-64f7bc4b4f-7q8wr Binding Scheduled Successfully assigned kuttl-test-enormous-asp/my-jaeger-collector-64f7bc4b4f-7q8wr to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:52 +0000 UTC Normal Pod my-jaeger-collector-64f7bc4b4f-7q8wr AddedInterface Add eth0 [10.131.0.19/23] from ovn-kubernetes multus
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:52 +0000 UTC Normal Pod my-jaeger-collector-64f7bc4b4f-7q8wr.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:52 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-64f7bc4b4f SuccessfulCreate Created pod: my-jaeger-collector-64f7bc4b4f-7q8wr replicaset-controller
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:52 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-64f7bc4b4f to 1 deployment-controller
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:52 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd Binding Scheduled Successfully assigned kuttl-test-enormous-asp/my-jaeger-query-794f6949c9-fqfkd to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:52 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd AddedInterface Add eth0 [10.129.2.22/23] from ovn-kubernetes multus
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:52 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-794f6949c9 SuccessfulCreate Created pod: my-jaeger-query-794f6949c9-fqfkd replicaset-controller
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:52 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-794f6949c9 to 1 deployment-controller
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-collector-64f7bc4b4f-7q8wr.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-collector-64f7bc4b4f-7q8wr.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:53 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:59 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:59 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:59 +0000 UTC Normal Pod my-jaeger-query-794f6949c9-fqfkd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:59 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-794f6949c9 SuccessfulDelete Deleted pod: my-jaeger-query-794f6949c9-fqfkd replicaset-controller
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:23:59 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-794f6949c9 to 0 from 1 deployment-controller
logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:00 +0000
UTC Normal Pod my-jaeger-query-846cb86746-wwkkh Binding Scheduled Successfully assigned kuttl-test-enormous-asp/my-jaeger-query-846cb86746-wwkkh to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:00 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-846cb86746 SuccessfulCreate Created pod: my-jaeger-query-846cb86746-wwkkh replicaset-controller logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:00 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-846cb86746 to 1 deployment-controller logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:01 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh AddedInterface Add eth0 [10.129.2.23/23] from ovn-kubernetes multus logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:01 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:01 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:01 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:01 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:01 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:02 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:02 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:02 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:02 +0000 UTC Normal Pod my-jaeger-query-846cb86746-wwkkh.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:04 +0000 UTC Normal Pod check-span-r8g6j Binding Scheduled Successfully assigned kuttl-test-enormous-asp/check-span-r8g6j to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:04 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-r8g6j job-controller logger.go:42: 13:24:28 | 
collector-otlp-production-http | 2024-12-02 13:24:04 +0000 UTC Normal Pod report-span-cpckn Binding Scheduled Successfully assigned kuttl-test-enormous-asp/report-span-cpckn to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:04 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-cpckn job-controller logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:05 +0000 UTC Normal Pod check-span-r8g6j AddedInterface Add eth0 [10.131.0.21/23] from ovn-kubernetes multus logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:05 +0000 UTC Normal Pod check-span-r8g6j.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:05 +0000 UTC Normal Pod report-span-cpckn AddedInterface Add eth0 [10.131.0.20/23] from ovn-kubernetes multus logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:05 +0000 UTC Normal Pod report-span-cpckn.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:07 +0000 UTC Normal Pod check-span-r8g6j.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.578s (2.578s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:08 +0000 UTC Normal Pod check-span-r8g6j.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:08 +0000 UTC Normal Pod check-span-r8g6j.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-64f7bc4b4f-7q8wr horizontal-pod-autoscaler logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:10 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:11 +0000 UTC Normal Pod report-span-cpckn.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 5.768s (5.768s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:11 +0000 UTC Normal Pod report-span-cpckn.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:11 +0000 UTC Normal Pod report-span-cpckn.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:25 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:25 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:24:28 | collector-otlp-production-http | 2024-12-02 13:24:27 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:24:28 | collector-otlp-production-http | Deleting namespace: kuttl-test-enormous-asp === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 13:24:41 | collector-otlp-production-grpc | Creating namespace: kuttl-test-literate-gorilla logger.go:42: 13:24:41 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 13:24:41 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-literate-gorilla/my-jaeger created logger.go:42: 13:25:39 | collector-otlp-production-grpc/1-install | test step completed 1-install logger.go:42: 13:25:39 | collector-otlp-production-grpc/2-smoke-test | starting test step 2-smoke-test logger.go:42: 13:25:39 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:25:40 | collector-otlp-production-grpc/2-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
logger.go:42: 13:25:49 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 13:25:49 | collector-otlp-production-grpc/2-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:25:49 | collector-otlp-production-grpc/2-smoke-test | job.batch/report-span created logger.go:42: 13:25:50 | collector-otlp-production-grpc/2-smoke-test | job.batch/check-span created logger.go:42: 13:26:11 | collector-otlp-production-grpc/2-smoke-test | test step completed 2-smoke-test logger.go:42: 13:26:11 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-literate-gorilla: logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:08 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b9899665 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn replicaset-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:08 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn Binding Scheduled Successfully assigned kuttl-test-literate-gorilla/elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:08 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestliterategorillamyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b9899665 to 1 deployment-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn AddedInterface Add eth0 [10.128.2.27/23] from ovn-kubernetes multus logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:09 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:09 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:24 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestliterategorillamyjaeger-1-79b98cvxdn.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:35 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-6c74c64db7 to 1 deployment-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-collector-6c74c64db7-dpgb8 Binding Scheduled Successfully assigned kuttl-test-literate-gorilla/my-jaeger-collector-6c74c64db7-dpgb8 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-collector-6c74c64db7-dpgb8 AddedInterface Add eth0 [10.131.0.22/23] from ovn-kubernetes multus logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-collector-6c74c64db7-dpgb8.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-collector-6c74c64db7-dpgb8.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-collector-6c74c64db7-dpgb8.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-6c74c64db7 SuccessfulCreate Created pod: my-jaeger-collector-6c74c64db7-dpgb8 replicaset-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l Binding Scheduled Successfully assigned kuttl-test-literate-gorilla/my-jaeger-query-6b769fbcc8-xvl4l to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l AddedInterface Add eth0 [10.129.2.24/23] from ovn-kubernetes multus logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{jaeger-query} Started Started container 
jaeger-query kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6b769fbcc8 SuccessfulCreate Created pod: my-jaeger-query-6b769fbcc8-xvl4l replicaset-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:36 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6b769fbcc8 to 1 deployment-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:37 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:37 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:37 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:37 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:37 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:44 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:44 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:44 +0000 UTC Normal Pod my-jaeger-query-6b769fbcc8-xvl4l.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:44 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6b769fbcc8 SuccessfulDelete Deleted pod: my-jaeger-query-6b769fbcc8-xvl4l replicaset-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:44 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-6b769fbcc8 to 0 from 1 deployment-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:45 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g Binding Scheduled Successfully assigned kuttl-test-literate-gorilla/my-jaeger-query-88d54db7f-sw84g to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:45 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-88d54db7f SuccessfulCreate Created pod: my-jaeger-query-88d54db7f-sw84g 
replicaset-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:45 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-88d54db7f to 1 deployment-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g AddedInterface Add eth0 [10.129.2.25/23] from ovn-kubernetes multus logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:46 +0000 UTC Normal Pod my-jaeger-query-88d54db7f-sw84g.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:49 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9hbqw job-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:50 +0000 UTC Normal Pod check-span-xx469 Binding Scheduled Successfully assigned kuttl-test-literate-gorilla/check-span-xx469 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:50 +0000 UTC Normal Pod check-span-xx469 AddedInterface Add eth0 [10.131.0.24/23] from ovn-kubernetes multus logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:50 +0000 UTC Normal Pod check-span-xx469.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet 
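The pass/fail signal for each smoke test is the check-span Job created above: the step only completes once the job-controller reports "Job completed", which appears further down in this event dump. Outside kuttl, an equivalent standalone wait could look like the sketch below; this is illustrative only, since the suite drives the wait through its kuttl assert files rather than kubectl:

# Block until the verification Job finishes; a timeout here means the reported spans never became queryable.
kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=300s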
logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:50 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-xx469 job-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:50 +0000 UTC Normal Pod report-span-9hbqw Binding Scheduled Successfully assigned kuttl-test-literate-gorilla/report-span-9hbqw to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:50 +0000 UTC Normal Pod report-span-9hbqw AddedInterface Add eth0 [10.131.0.23/23] from ovn-kubernetes multus logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:50 +0000 UTC Normal Pod report-span-9hbqw.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:51 +0000 UTC Normal Pod check-span-xx469.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.097s (1.097s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:51 +0000 UTC Normal Pod report-span-9hbqw.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.075s (1.075s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:51 +0000 UTC Normal Pod report-span-9hbqw.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:52 +0000 UTC Normal Pod check-span-xx469.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:52 +0000 UTC Normal Pod check-span-xx469.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:25:52 +0000 UTC Normal Pod report-span-9hbqw.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:26:11 | collector-otlp-production-grpc | 2024-12-02 13:26:10 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:26:11 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-literate-gorilla === CONT kuttl/harness/collector-autoscale logger.go:42: 13:26:24 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:26:24 | collector-autoscale | Creating namespace: kuttl-test-generous-mayfly logger.go:42: 13:26:24 | collector-autoscale/1-install | starting test step 1-install logger.go:42: 13:26:24 | collector-autoscale/1-install | Jaeger:kuttl-test-generous-mayfly/simple-prod created logger.go:42: 13:27:14 | collector-autoscale/1-install | test step completed 1-install logger.go:42: 13:27:14 | collector-autoscale/2- | starting test step 2- logger.go:42: 13:27:14 | collector-autoscale/2- | test step completed 2- logger.go:42: 13:27:14 | collector-autoscale | collector-autoscale events from ns kuttl-test-generous-mayfly: logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:44 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7699fd9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb replicaset-controller logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:44 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb Binding Scheduled Successfully assigned kuttl-test-generous-mayfly/elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:44 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7699fd9 to 1 deployment-controller logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb AddedInterface Add eth0 [10.128.2.28/23] from ovn-kubernetes multus logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb.spec.containers{elasticsearch} Created 
Created container elasticsearch kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:26:45 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:00 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestgenerousmayflysimpleprod-1-6dc7g5zzb.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:11 +0000 UTC Normal Pod simple-prod-collector-5df7c599d9-4qrhn Binding Scheduled Successfully assigned kuttl-test-generous-mayfly/simple-prod-collector-5df7c599d9-4qrhn to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:11 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5df7c599d9 SuccessfulCreate Created pod: simple-prod-collector-5df7c599d9-4qrhn replicaset-controller logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:11 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5df7c599d9 to 1 deployment-controller logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:11 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w Binding Scheduled Successfully assigned kuttl-test-generous-mayfly/simple-prod-query-6b4579dfb-g485w to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:11 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6b4579dfb SuccessfulCreate Created pod: simple-prod-query-6b4579dfb-g485w replicaset-controller logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:11 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6b4579dfb to 1 deployment-controller logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-collector-5df7c599d9-4qrhn AddedInterface Add eth0 [10.131.0.25/23] from ovn-kubernetes multus logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-collector-5df7c599d9-4qrhn.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-collector-5df7c599d9-4qrhn.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC 
Normal Pod simple-prod-collector-5df7c599d9-4qrhn.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w AddedInterface Add eth0 [10.129.2.26/23] from ovn-kubernetes multus logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:27:14 | collector-autoscale | 2024-12-02 13:27:12 +0000 UTC Normal Pod simple-prod-query-6b4579dfb-g485w.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:27:14 | collector-autoscale | Deleting namespace: kuttl-test-generous-mayfly === CONT kuttl/harness/collector-otlp-allinone-grpc logger.go:42: 13:27:20 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-complete-yak logger.go:42: 13:27:20 | collector-otlp-allinone-grpc/0-install | starting test step 0-install logger.go:42: 13:27:20 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-complete-yak/my-jaeger created logger.go:42: 13:27:27 | collector-otlp-allinone-grpc/0-install | test step completed 0-install logger.go:42: 13:27:27 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test logger.go:42: 13:27:27 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:27:29 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl 
apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 13:27:35 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 13:27:36 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:27:36 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created logger.go:42: 13:27:36 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created logger.go:42: 13:27:57 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-complete-yak: logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:24 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw Binding Scheduled Successfully assigned kuttl-test-complete-yak/my-jaeger-77cd8dc447-5hbkw to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:24 +0000 UTC Normal ReplicaSet.apps my-jaeger-77cd8dc447 SuccessfulCreate Created pod: my-jaeger-77cd8dc447-5hbkw replicaset-controller logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:24 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-77cd8dc447 to 1 deployment-controller logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:25 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw AddedInterface Add eth0 [10.128.2.29/23] from ovn-kubernetes multus logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:25 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:25 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:25 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:25 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:25 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:25 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:27:57 | 
collector-otlp-allinone-grpc | 2024-12-02 13:27:32 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:32 +0000 UTC Normal Pod my-jaeger-77cd8dc447-5hbkw.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:32 +0000 UTC Normal ReplicaSet.apps my-jaeger-77cd8dc447 SuccessfulDelete Deleted pod: my-jaeger-77cd8dc447-5hbkw replicaset-controller logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:32 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-77cd8dc447 to 0 from 1 deployment-controller logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:33 +0000 UTC Normal Pod my-jaeger-648b95f495-9948z Binding Scheduled Successfully assigned kuttl-test-complete-yak/my-jaeger-648b95f495-9948z to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:33 +0000 UTC Normal ReplicaSet.apps my-jaeger-648b95f495 SuccessfulCreate Created pod: my-jaeger-648b95f495-9948z replicaset-controller logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:33 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-648b95f495 to 1 deployment-controller logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:34 +0000 UTC Normal Pod my-jaeger-648b95f495-9948z AddedInterface Add eth0 [10.128.2.30/23] from ovn-kubernetes multus logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:34 +0000 UTC Normal Pod my-jaeger-648b95f495-9948z.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:34 +0000 UTC Normal Pod my-jaeger-648b95f495-9948z.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:34 +0000 UTC Normal Pod my-jaeger-648b95f495-9948z.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:34 +0000 UTC Normal Pod my-jaeger-648b95f495-9948z.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:34 +0000 UTC Normal Pod my-jaeger-648b95f495-9948z.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:34 +0000 UTC Normal Pod my-jaeger-648b95f495-9948z.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:36 +0000 UTC Normal Pod check-span-88llt Binding Scheduled Successfully assigned kuttl-test-complete-yak/check-span-88llt to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:36 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-88llt job-controller logger.go:42: 13:27:57 | 
collector-otlp-allinone-grpc | 2024-12-02 13:27:36 +0000 UTC Normal Pod report-span-9djfp Binding Scheduled Successfully assigned kuttl-test-complete-yak/report-span-9djfp to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:36 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9djfp job-controller logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:37 +0000 UTC Normal Pod check-span-88llt AddedInterface Add eth0 [10.129.2.27/23] from ovn-kubernetes multus logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:37 +0000 UTC Normal Pod check-span-88llt.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:37 +0000 UTC Normal Pod report-span-9djfp AddedInterface Add eth0 [10.131.0.26/23] from ovn-kubernetes multus logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:37 +0000 UTC Normal Pod report-span-9djfp.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:38 +0000 UTC Normal Pod check-span-88llt.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 810ms (810ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:38 +0000 UTC Normal Pod check-span-88llt.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:38 +0000 UTC Normal Pod check-span-88llt.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:38 +0000 UTC Normal Pod report-span-9djfp.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 861ms (861ms including waiting). Image size: 60976023 bytes. 
kubelet
logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:38 +0000 UTC Normal Pod report-span-9djfp.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:38 +0000 UTC Normal Pod report-span-9djfp.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | 2024-12-02 13:27:56 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:27:57 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-complete-yak
=== CONT kuttl/harness/cassandra-spark
logger.go:42: 13:28:09 | cassandra-spark | Ignoring 01-assert.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:28:09 | cassandra-spark | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:28:09 | cassandra-spark | Creating namespace: kuttl-test-fluent-viper
logger.go:42: 13:28:09 | cassandra-spark | cassandra-spark events from ns kuttl-test-fluent-viper:
logger.go:42: 13:28:09 | cassandra-spark | Deleting namespace: kuttl-test-fluent-viper
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (489.09s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (6.64s)
--- PASS: kuttl/harness/collector-otlp-allinone-http (67.24s)
--- PASS: kuttl/harness/set-custom-img (100.29s)
--- PASS: kuttl/harness/collector-otlp-production-http (100.27s)
--- PASS: kuttl/harness/collector-otlp-production-grpc (103.08s)
--- PASS: kuttl/harness/collector-autoscale (56.08s)
--- PASS: kuttl/harness/collector-otlp-allinone-grpc (49.16s)
--- PASS: kuttl/harness/cassandra-spark (6.13s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml
time="2024-12-02T13:28:17Z" level=debug msg="Setting a new name for the test suites"
time="2024-12-02T13:28:17Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-12-02T13:28:17Z" level=debug msg="normalizing test case names"
time="2024-12-02T13:28:17Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts"
time="2024-12-02T13:28:17Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http"
time="2024-12-02T13:28:17Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img"
time="2024-12-02T13:28:17Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http"
time="2024-12-02T13:28:17Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc"
time="2024-12-02T13:28:17Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale"
time="2024-12-02T13:28:17Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc"
time="2024-12-02T13:28:17Z" level=debug msg="miscellaneous/cassandra-spark -> miscellaneous_cassandra_spark"
+----------------------------------------------+--------+
|                     NAME                     | RESULT |
+----------------------------------------------+--------+
| miscellaneous_artifacts                      | passed |
| miscellaneous_collector_otlp_allinone_http   | passed |
| miscellaneous_set_custom_img                 | passed |
| miscellaneous_collector_otlp_production_http | passed |
| miscellaneous_collector_otlp_production_grpc | passed |
| miscellaneous_collector_autoscale            | passed |
| miscellaneous_collector_otlp_allinone_grpc   | passed |
| miscellaneous_cassandra_spark                | passed |
+----------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=elasticsearch
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/elasticsearch.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-elasticsearch
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
SKIP_ES_EXTERNAL=true \
KAFKA_VERSION=3.6.0 \
SKIP_KAFKA=false \
./tests/e2e/elasticsearch/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 19m Cluster version is 4.18.0-0.nightly-2024-11-30-141716'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 19m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/elasticsearch/render.sh
++ export SUITE_DIR=./tests/e2e/elasticsearch
++ SUITE_DIR=./tests/e2e/elasticsearch
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + start_test es-from-aio-to-production + '[' 1 -ne 1 ']' + test_name=es-from-aio-to-production + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-from-aio-to-production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m' Rendering files for test es-from-aio-to-production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-from-aio-to-production + cd es-from-aio-to-production + jaeger_name=my-jaeger + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export 
JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 03 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=03 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml + render_smoke_test my-jaeger true 04 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test es-increasing-replicas + '[' 1 -ne 1 ']' + test_name=es-increasing-replicas + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-increasing-replicas' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m' Rendering files for test es-increasing-replicas + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production + '[' es-from-aio-to-production '!=' _build ']' + cd .. 
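[editor's note] Every test directory above is populated the same way: export the variables a template consumes, render the numbered install/assert step pair with gomplate, optionally patch the result with yq, and unset the exports so the next test starts clean. Condensed from the commands in this trace (an illustration of the mechanism, not the render scripts' verbatim source):

# Render one kuttl step pair; gomplate substitutes JAEGER_NAME and friends
# into the templates and writes the numbered files kuttl executes in order.
export JAEGER_NAME=my-jaeger
T=/tmp/jaeger-tests/tests/templates
/tmp/jaeger-tests/bin/gomplate -f "$T/allinone-jaeger-install.yaml.template" -o ./00-install.yaml
/tmp/jaeger-tests/bin/gomplate -f "$T/allinone-jaeger-assert.yaml.template" -o ./00-assert.yaml

# Rendered manifests are then adjusted in place where a test needs it,
# e.g. the single-node Elasticsearch tweak applied to 03-install.yaml above:
/tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml

# Unset the exports afterwards so later renders see a clean environment.
unset JAEGER_NAME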
+ mkdir -p es-increasing-replicas + cd es-increasing-replicas + jaeger_name=simple-prod + '[' true = true ']' + jaeger_deployment_mode=production_autoprovisioned + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml + cp ./01-assert.yaml ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml + render_smoke_test simple-prod true 03 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=03 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + cp ./02-install.yaml ./04-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml + /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml + '[' true = true ']' + skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas + '[' es-increasing-replicas '!=' _build ']' + cd .. 
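[editor's note] The es-increasing-replicas steps just rendered are built by copying the previous step's manifests and patching only the fields under test, so each kuttl step describes exactly one change. The pattern, lifted from the trace (yq here stands for the pinned /tmp/jaeger-tests/bin/yq):

# Step 02 reuses step 01's install with the replica counts raised.
cp ./01-install.yaml ./02-install.yaml
yq e -i '.spec.collector.replicas=2' ./02-install.yaml
yq e -i '.spec.query.replicas=2' ./02-install.yaml

# The assert checks spec and status together, so the step only passes
# once the scaled-up pods are actually ready.
cp ./01-assert.yaml ./02-assert.yaml
yq e -i '.spec.replicas=2' ./02-assert.yaml
yq e -i '.status.readyReplicas=2' ./02-assert.yaml

# Step 04 applies the same trick to the storage backend:
# .spec.storage.elasticsearch.nodeCount=2, asserted by 05-check-es-nodes.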
+ rm -rf es-index-cleaner-upstream + warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_index_cleaner -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-index-cleaner-autoprov + '[' 1 -ne 1 ']' + test_name=es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-index-cleaner-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m' Rendering files for test es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-index-cleaner-autoprov + cd es-index-cleaner-autoprov + jaeger_name=test-es-index-cleaner-with-prefix + cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md . + render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml + render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02 + '[' 6 -ne 6 ']' + jaeger=test-es-index-cleaner-with-prefix + is_secured=true + number_of_spans=5 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=5 + DAYS=5 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + 
'[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + sed 's~enabled: false~enabled: true~gi' ./01-install.yaml + CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml + /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=00 + test_step=06 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=test-es-index-cleaner-with-prefix-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.15"}}]}' + '[' -z 
'{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.15"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.15"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.8.15 ++ version_ge 5.8.15 5.4 +++ echo 5.8.15 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.8.15 == 5.8.15 + '[' -n '' ']' + skip_test es-index-cleaner-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-index-cleaner-autoprov + '[' es-index-cleaner-autoprov '!=' _build ']' + cd .. + rm -rf es-index-cleaner-managed + warning 'es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-index-cleaner-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + start_test es-multiinstance + '[' 1 -ne 1 ']' + test_name=es-multiinstance + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-multiinstance' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-multiinstance\e[0m' Rendering files for test es-multiinstance + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-multiinstance + cd es-multiinstance + jaeger_name=instance-1 + render_install_jaeger instance-1 production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=instance-1 + JAEGER_NAME=instance-1 + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f ./03-create-second-instance.yaml.template -o 03-create-second-instance.yaml + '[' true = true ']' + skip_test es-rollover-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-rollover-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename 
/tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-multiinstance + '[' es-multiinstance '!=' _build ']' + cd .. + rm -rf es-rollover-upstream + warning 'es-rollover-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-rollover-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_rollover -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-rollover-autoprov + '[' 1 -ne 1 ']' + test_name=es-rollover-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-rollover-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-rollover-autoprov\e[0m' Rendering files for test es-rollover-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-rollover-autoprov + cd es-rollover-autoprov + cp ../../es-rollover-upstream/05-assert.yaml ../../es-rollover-upstream/05-install.yaml ../../es-rollover-upstream/README.md . + jaeger_name=my-jaeger + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + render_install_jaeger my-jaeger production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + render_report_spans my-jaeger true 2 00 true 02 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + 
/tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 00 03 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=00 + test_step=03 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./03-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./03-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 01 04 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=01 + test_step=04 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=01 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./04-check-indices.yaml + JOB_NUMBER=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./04-assert.yaml + render_report_spans my-jaeger true 2 02 true 06 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=02 + ensure_reported_spans=true + test_step=06 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=02 + JOB_NUMBER=02 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template 
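[editor's note] The escape_command/CMD_PARAMETERS sequence that recurs around every check-indices render exists because the parameters are regular expressions: each backslash must survive one round of gomplate substitution. A reconstruction of the helper's observable behavior:

# Double every backslash so patterns such as 'jaeger-span-\d{4}-\d{2}-\d{2}'
# reach the rendered Job with their single backslashes intact.
escape_command() {
  echo "$1" | sed 's/\\/\\\\/g'
}

CMD_PARAMETERS="$(escape_command "'--pattern', 'jaeger-span-\d{4}-\d{2}-\d{2}', '--assert-count-indices', '0',")"
export CMD_PARAMETERS   # now carries \\d{4}-\\d{2}-\\d{2}, as the trace shows
/tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml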
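[editor's note] The Elasticsearch OpenShift Operator version probed a little earlier (and again before the es-rollover-managed gate below) comes from OLM's properties annotation on the operator pod. A sketch of that lookup as the trace shows it:

get_elasticsearch_openshift_operator_version() {
  # OLM stamps each operator pod with a JSON annotation describing the
  # packaged bundle; the version lives under the olm.package property.
  local properties
  properties=$(kubectl get pods -l name=elasticsearch-operator --all-namespaces \
    '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}')
  ESO_OPERATOR_VERSION=$(echo "$properties" | \
    /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version')
  export ESO_OPERATOR_VERSION
}

Worth noting: in this run `version_ge 5.8.15 5.4` succeeds, yet the managed tests are still skipped. The trace shows the skip is decided by the following `'[' -n '' ']'` guard on an empty string, so the "only supported with Elasticsearch OpenShift Operator >= 5.4" message does not reflect the installed 5.8.15 version.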
+ '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./06-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./06-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' 02 07 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + job_number=02 + test_step=07 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=02 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./07-check-indices.yaml + JOB_NUMBER=02 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./07-assert.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' 03 08 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + job_number=03 + test_step=08 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{6}'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=03 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{6}'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./08-check-indices.yaml + JOB_NUMBER=03 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./08-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' 04 09 + '[' 4 -ne 4 ']' + secured=true + 
cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + job_number=04 + test_step=09 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=04 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-exist'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./09-check-indices.yaml + JOB_NUMBER=04 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./09-assert.yaml + render_report_spans my-jaeger true 2 03 true 10 + '[' 6 -ne 6 ']' + jaeger=my-jaeger + is_secured=true + number_of_spans=2 + job_number=03 + ensure_reported_spans=true + test_step=10 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JOB_NUMBER=03 + JOB_NUMBER=03 + export DAYS=2 + DAYS=2 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + '[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./10-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./10-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + CRONJOB_NAME=my-jaeger-es-rollover + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./11-wait-rollover.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-000002'\'',' 05 11 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-000002'\'',' + job_number=05 + test_step=11 + escape_command ''\''--name'\'', '\''jaeger-span-000002'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-000002'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-000002'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-000002'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=05 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-000002'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./11-check-indices.yaml + 
JOB_NUMBER=05 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./11-assert.yaml + render_check_indices true ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' 06 12 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + job_number=06 + test_step=12 + escape_command ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + '[' 1 -ne 1 ']' + command=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ echo ''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=my-jaeger-curator + JOB_NUMBER=06 + CMD_PARAMETERS=''\''--name'\'', '\''jaeger-span-read'\'', '\''--assert-count-docs'\'', '\''4'\'', '\''--jaeger-service'\'', '\''smoke-test-service'\'',' + MOUNT_SECRET=my-jaeger-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./12-check-indices.yaml + JOB_NUMBER=06 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./12-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' + properties='{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.15"}}]}' + '[' -z '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.15"}}]}' ']' ++ echo '{"properties":[{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Elasticsearch","version":"v1"}},{"type":"olm.gvk","value":{"group":"logging.openshift.io","kind":"Kibana","version":"v1"}},{"type":"olm.maxOpenShiftVersion","value":4.18},{"type":"olm.package","value":{"packageName":"elasticsearch-operator","version":"5.8.15"}}]}' ++ /tmp/jaeger-tests/bin/yq e -P '.properties.[] | select(.value.packageName == "elasticsearch-operator") | .value.version' + ESO_OPERATOR_VERSION=5.8.15 ++ version_ge 5.8.15 5.4 +++ echo 5.8.15 5.4 +++ tr ' ' '\n' +++ sort -rV +++ head -n 1 ++ test 5.8.15 == 5.8.15 + '[' -n '' ']' + skip_test 
es-rollover-managed 'Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 2 -ne 2 ']' + test_name=es-rollover-managed + message='Test only supported with Elasticsearch OpenShift Operator >= 5.4' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-rollover-autoprov + '[' es-rollover-autoprov '!=' _build ']' + cd .. + rm -rf es-rollover-managed + warning 'es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4\e[0m' WAR: es-rollover-managed: Test only supported with Elasticsearch OpenShift Operator >= 5.4 + '[' true = true ']' + skip_test es-spark-dependencies 'This test is not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=es-spark-dependencies + message='This test is not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + rm -rf es-spark-dependencies + warning 'es-spark-dependencies: This test is not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-spark-dependencies: This test is not supported in OpenShift\e[0m' WAR: es-spark-dependencies: This test is not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running elasticsearch E2E tests' Running elasticsearch E2E tests + cd tests/e2e/elasticsearch/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1113221797 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 7 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/es-from-aio-to-production === PAUSE kuttl/harness/es-from-aio-to-production === RUN kuttl/harness/es-increasing-replicas === PAUSE kuttl/harness/es-increasing-replicas === RUN kuttl/harness/es-index-cleaner-autoprov === PAUSE kuttl/harness/es-index-cleaner-autoprov === RUN kuttl/harness/es-multiinstance === PAUSE kuttl/harness/es-multiinstance === RUN kuttl/harness/es-rollover-autoprov === PAUSE kuttl/harness/es-rollover-autoprov === RUN kuttl/harness/es-simple-prod === PAUSE kuttl/harness/es-simple-prod === CONT kuttl/harness/artifacts logger.go:42: 13:28:41 | artifacts | Creating namespace: kuttl-test-united-peacock logger.go:42: 13:28:41 | artifacts | artifacts events from ns kuttl-test-united-peacock: logger.go:42: 13:28:41 | artifacts | Deleting namespace: kuttl-test-united-peacock === CONT kuttl/harness/es-multiinstance logger.go:42: 13:28:47 | es-multiinstance | Ignoring 03-create-second-instance.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:28:47 | es-multiinstance | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:28:47 | es-multiinstance | Creating namespace: kuttl-test-winning-molly logger.go:42: 13:28:47 | es-multiinstance/0-clear-namespace | starting test step 0-clear-namespace logger.go:42: 13:28:47 | es-multiinstance/0-clear-namespace | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --ignore-not-found=true] logger.go:42: 13:28:47 | es-multiinstance/0-clear-namespace | test step completed 0-clear-namespace logger.go:42: 13:28:47 | es-multiinstance/1-install | starting test step 1-install logger.go:42: 13:28:47 | es-multiinstance/1-install | Jaeger:kuttl-test-winning-molly/instance-1 created logger.go:42: 13:29:42 | es-multiinstance/1-install | test step completed 1-install logger.go:42: 13:29:42 | es-multiinstance/2-create-namespace | starting test step 2-create-namespace logger.go:42: 13:29:42 | es-multiinstance/2-create-namespace | running command: [sh -c kubectl create namespace jaeger-e2e-multiinstance-test] logger.go:42: 13:29:42 | es-multiinstance/2-create-namespace | namespace/jaeger-e2e-multiinstance-test created logger.go:42: 13:29:42 | es-multiinstance/2-create-namespace | test step completed 2-create-namespace logger.go:42: 13:29:42 | es-multiinstance/3-create-second-instance | starting test step 3-create-second-instance logger.go:42: 13:29:42 | es-multiinstance/3-create-second-instance | running command: [sh -c kubectl apply -f ./01-install.yaml -n jaeger-e2e-multiinstance-test] logger.go:42: 13:29:42 | es-multiinstance/3-create-second-instance | jaeger.jaegertracing.io/instance-1 created logger.go:42: 13:29:42 | es-multiinstance/3-create-second-instance | running command: [sh -c /tmp/jaeger-tests/bin/kubectl-kuttl assert ./01-assert.yaml -n jaeger-e2e-multiinstance-test --timeout 1000] logger.go:42: 13:30:53 | es-multiinstance/3-create-second-instance | assert is valid logger.go:42: 13:30:53 | es-multiinstance/3-create-second-instance | test step completed 3-create-second-instance logger.go:42: 13:30:53 | es-multiinstance/4-check-secrets | starting test step 4-check-secrets logger.go:42: 13:30:53 | es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n $NAMESPACE > secret1] logger.go:42: 13:30:53 | 
es-multiinstance/4-check-secrets | running command: [sh -c kubectl get secrets elasticsearch -o jsonpath='{.data.logging-es\.crt}' -n jaeger-e2e-multiinstance-test > secret2] logger.go:42: 13:30:54 | es-multiinstance/4-check-secrets | running command: [sh -c cmp --silent secret1 secret2 || exit 0] logger.go:42: 13:30:54 | es-multiinstance/4-check-secrets | test step completed 4-check-secrets logger.go:42: 13:30:54 | es-multiinstance/5-delete | starting test step 5-delete logger.go:42: 13:30:54 | es-multiinstance/5-delete | running command: [sh -c kubectl delete namespace jaeger-e2e-multiinstance-test --wait=false] logger.go:42: 13:30:54 | es-multiinstance/5-delete | namespace "jaeger-e2e-multiinstance-test" deleted logger.go:42: 13:30:54 | es-multiinstance/5-delete | test step completed 5-delete logger.go:42: 13:30:54 | es-multiinstance | es-multiinstance events from ns kuttl-test-winning-molly: logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:12 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5f95c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m replicaset-controller logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:12 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m Binding Scheduled Successfully assigned kuttl-test-winning-molly/elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:12 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestwinningmollyinstance1-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5f95c to 1 deployment-controller logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m AddedInterface Add eth0 [10.128.2.31/23] from ovn-kubernetes multus logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:13 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:13 +0000 UTC Normal Pod 
elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:28 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestwinningmollyinstance1-1-6d4bc5fj745m.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal Pod instance-1-collector-69764f8cfb-qjz8z Binding Scheduled Successfully assigned kuttl-test-winning-molly/instance-1-collector-69764f8cfb-qjz8z to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal Pod instance-1-collector-69764f8cfb-qjz8z AddedInterface Add eth0 [10.131.0.27/23] from ovn-kubernetes multus logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal Pod instance-1-collector-69764f8cfb-qjz8z.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal ReplicaSet.apps instance-1-collector-69764f8cfb SuccessfulCreate Created pod: instance-1-collector-69764f8cfb-qjz8z replicaset-controller logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal Deployment.apps instance-1-collector ScalingReplicaSet Scaled up replica set instance-1-collector-69764f8cfb to 1 deployment-controller logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n Binding Scheduled Successfully assigned kuttl-test-winning-molly/instance-1-query-869c77ff4-ddd4n to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n AddedInterface Add eth0 [10.129.2.28/23] from ovn-kubernetes multus logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal ReplicaSet.apps instance-1-query-869c77ff4 SuccessfulCreate Created pod: instance-1-query-869c77ff4-ddd4n replicaset-controller logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:39 +0000 UTC Normal Deployment.apps instance-1-query ScalingReplicaSet Scaled up replica set instance-1-query-869c77ff4 to 1 deployment-controller logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-collector-69764f8cfb-qjz8z.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-collector-69764f8cfb-qjz8z.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n.spec.containers{jaeger-query} Started 
Started container jaeger-query kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:40 +0000 UTC Normal Pod instance-1-query-869c77ff4-ddd4n.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:54 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:54 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:29:54 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:30:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:30:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod instance-1-collector-69764f8cfb-qjz8z horizontal-pod-autoscaler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:30:09 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:30:24 
+0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod instance-1-collector-69764f8cfb-qjz8z horizontal-pod-autoscaler logger.go:42: 13:30:54 | es-multiinstance | 2024-12-02 13:30:24 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling instance-1-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod instance-1-collector-69764f8cfb-qjz8z horizontal-pod-autoscaler logger.go:42: 13:30:54 | es-multiinstance | Deleting namespace: kuttl-test-winning-molly === CONT kuttl/harness/es-simple-prod logger.go:42: 13:31:02 | es-simple-prod | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:31:02 | es-simple-prod | Creating namespace: kuttl-test-romantic-goat logger.go:42: 13:31:02 | es-simple-prod | es-simple-prod events from ns kuttl-test-romantic-goat: logger.go:42: 13:31:02 | es-simple-prod | Deleting namespace: kuttl-test-romantic-goat === CONT kuttl/harness/es-rollover-autoprov logger.go:42: 13:31:08 | es-rollover-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:31:08 | es-rollover-autoprov | Creating namespace: kuttl-test-deciding-garfish logger.go:42: 13:31:08 | es-rollover-autoprov/1-install | starting test step 1-install logger.go:42: 13:31:08 | es-rollover-autoprov/1-install | Jaeger:kuttl-test-deciding-garfish/my-jaeger created logger.go:42: 13:31:54 | es-rollover-autoprov/1-install | test step completed 1-install logger.go:42: 13:31:54 | es-rollover-autoprov/2-report-spans | starting test step 2-report-spans logger.go:42: 13:31:54 | es-rollover-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:31:56 | es-rollover-autoprov/2-report-spans | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
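[editor's note] The kubectl warning above appears to be benign: kuttl created the Jaeger CR imperatively, and the token step seemingly reconfigures it with `kubectl apply`, so kubectl backfills the last-applied-configuration annotation on first use. The 2-report-spans step running here is the execution half of the render_report_spans calls traced during rendering: fetch a bearer token for the secured query route, render a one-shot Job from report-spans.yaml.template, and apply it. Distilled into a single helper (a sketch of the observable flow; parameter handling is simplified and the ASSERT_IMG override seen in the log is omitted; paths are relative to the test directory as in the log):

report_spans() {
  local job_number=$1 jaeger=$2 namespace=$3
  # get-token.sh logs in with the e2e-test service account so the Job can
  # talk to the OAuth-protected query endpoint.
  SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh "$namespace" "$jaeger" /dev/null
  # Render the Job with the endpoints in the environment, then submit it;
  # the matching assert waits for the Job to complete.
  DAYS=2 JOB_NUMBER="$job_number" \
    JAEGER_COLLECTOR_ENDPOINT="http://${jaeger}-collector-headless:14268" \
    JAEGER_QUERY_ENDPOINT="https://${jaeger}-query" \
    MOUNT_SECRET=e2e-test \
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template \
    -o "report-span-${job_number}-job.yaml"
  kubectl apply -f "report-span-${job_number}-job.yaml" -n "$namespace"
}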
logger.go:42: 13:32:05 | es-rollover-autoprov/2-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml] logger.go:42: 13:32:05 | es-rollover-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE] logger.go:42: 13:32:06 | es-rollover-autoprov/2-report-spans | job.batch/00-report-span created logger.go:42: 13:32:31 | es-rollover-autoprov/2-report-spans | test step completed 2-report-spans logger.go:42: 13:32:31 | es-rollover-autoprov/3-check-indices | starting test step 3-check-indices logger.go:42: 13:32:31 | es-rollover-autoprov/3-check-indices | Job:kuttl-test-deciding-garfish/00-check-indices created logger.go:42: 13:32:43 | es-rollover-autoprov/3-check-indices | test step completed 3-check-indices logger.go:42: 13:32:43 | es-rollover-autoprov/4-check-indices | starting test step 4-check-indices logger.go:42: 13:32:43 | es-rollover-autoprov/4-check-indices | Job:kuttl-test-deciding-garfish/01-check-indices created logger.go:42: 13:32:48 | es-rollover-autoprov/4-check-indices | test step completed 4-check-indices logger.go:42: 13:32:48 | es-rollover-autoprov/5-install | starting test step 5-install logger.go:42: 13:32:48 | es-rollover-autoprov/5-install | Jaeger:kuttl-test-deciding-garfish/my-jaeger updated logger.go:42: 13:33:04 | es-rollover-autoprov/5-install | test step completed 5-install logger.go:42: 13:33:04 | es-rollover-autoprov/6-report-spans | starting test step 6-report-spans logger.go:42: 13:33:04 | es-rollover-autoprov/6-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:33:12 | es-rollover-autoprov/6-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JOB_NUMBER=02 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-02-job.yaml] logger.go:42: 13:33:12 | es-rollover-autoprov/6-report-spans | running command: [sh -c kubectl apply -f report-span-02-job.yaml -n $NAMESPACE] logger.go:42: 13:33:12 | es-rollover-autoprov/6-report-spans | job.batch/02-report-span created logger.go:42: 13:33:37 | es-rollover-autoprov/6-report-spans | test step completed 6-report-spans logger.go:42: 13:33:37 | es-rollover-autoprov/7-check-indices | starting test step 7-check-indices logger.go:42: 13:33:37 | es-rollover-autoprov/7-check-indices | Job:kuttl-test-deciding-garfish/02-check-indices created logger.go:42: 13:33:42 | es-rollover-autoprov/7-check-indices | test step completed 7-check-indices logger.go:42: 13:33:42 | es-rollover-autoprov/8-check-indices | starting test step 8-check-indices logger.go:42: 13:33:42 | es-rollover-autoprov/8-check-indices | Job:kuttl-test-deciding-garfish/03-check-indices created logger.go:42: 13:33:47 | es-rollover-autoprov/8-check-indices | test step completed 8-check-indices logger.go:42: 13:33:47 | es-rollover-autoprov/9-check-indices | starting test step 9-check-indices logger.go:42: 13:33:47 | es-rollover-autoprov/9-check-indices | 
Job:kuttl-test-deciding-garfish/04-check-indices created logger.go:42: 13:33:51 | es-rollover-autoprov/9-check-indices | test step completed 9-check-indices logger.go:42: 13:33:51 | es-rollover-autoprov/10-report-spans | starting test step 10-report-spans logger.go:42: 13:33:51 | es-rollover-autoprov/10-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:33:59 | es-rollover-autoprov/10-report-spans | running command: [sh -c DAYS=2 ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JOB_NUMBER=03 JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-03-job.yaml] logger.go:42: 13:34:00 | es-rollover-autoprov/10-report-spans | running command: [sh -c kubectl apply -f report-span-03-job.yaml -n $NAMESPACE] logger.go:42: 13:34:00 | es-rollover-autoprov/10-report-spans | job.batch/03-report-span created logger.go:42: 13:34:27 | es-rollover-autoprov/10-report-spans | test step completed 10-report-spans logger.go:42: 13:34:27 | es-rollover-autoprov/11-check-indices | starting test step 11-check-indices logger.go:42: 13:34:27 | es-rollover-autoprov/11-check-indices | running command: [sh -c go run ../../../../cmd-utils/wait-cronjob/main.go --cronjob my-jaeger-es-rollover --namespace $NAMESPACE] logger.go:42: 13:34:37 | es-rollover-autoprov/11-check-indices | time="2024-12-02T13:34:37Z" level=debug msg="Checking if the my-jaeger-es-rollover CronJob exists" logger.go:42: 13:34:37 | es-rollover-autoprov/11-check-indices | time="2024-12-02T13:34:37Z" level=debug msg="No BatchV1beta1/Cronjobs were found" logger.go:42: 13:34:37 | es-rollover-autoprov/11-check-indices | time="2024-12-02T13:34:37Z" level=info msg="Cronjob my-jaeger-es-rollover found successfully" logger.go:42: 13:34:37 | es-rollover-autoprov/11-check-indices | time="2024-12-02T13:34:37Z" level=debug msg="Waiting for the next scheduled job from my-jaeger-es-rollover cronjob" logger.go:42: 13:34:37 | es-rollover-autoprov/11-check-indices | time="2024-12-02T13:34:37Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 13:34:47 | es-rollover-autoprov/11-check-indices | time="2024-12-02T13:34:47Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 13:34:57 | es-rollover-autoprov/11-check-indices | time="2024-12-02T13:34:57Z" level=debug msg="Waiting for next job from my-jaeger-es-rollover to succeed" logger.go:42: 13:35:07 | es-rollover-autoprov/11-check-indices | time="2024-12-02T13:35:07Z" level=info msg="Job of owner my-jaeger-es-rollover succeeded after my-jaeger-es-rollover 30.053256041s" logger.go:42: 13:35:07 | es-rollover-autoprov/11-check-indices | Job:kuttl-test-deciding-garfish/05-check-indices created logger.go:42: 13:35:15 | es-rollover-autoprov/11-check-indices | test step completed 11-check-indices logger.go:42: 13:35:15 | es-rollover-autoprov/12-check-indices | starting test step 12-check-indices logger.go:42: 13:35:15 | es-rollover-autoprov/12-check-indices | Job:kuttl-test-deciding-garfish/06-check-indices created logger.go:42: 13:35:21 | es-rollover-autoprov/12-check-indices | test step completed 12-check-indices logger.go:42: 13:35:21 | es-rollover-autoprov | es-rollover-autoprov events from ns kuttl-test-deciding-garfish: logger.go:42: 13:35:21 | 
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:24 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5465f9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz replicaset-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:24 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:24 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5465f9 to 1 deployment-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz AddedInterface Add eth0 [10.128.2.33/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:40 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestdecidinggarfishmyjaeger-1-787f5lsjlz.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:51 +0000 UTC Normal Pod my-jaeger-collector-5b44b744d6-7qk4g Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-collector-5b44b744d6-7qk4g to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:51 +0000 UTC Normal Pod my-jaeger-collector-5b44b744d6-7qk4g AddedInterface Add eth0 [10.131.0.29/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:51 +0000 UTC Normal Pod my-jaeger-collector-5b44b744d6-7qk4g.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:51 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5b44b744d6 SuccessfulCreate Created pod: my-jaeger-collector-5b44b744d6-7qk4g replicaset-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:51 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5b44b744d6 to 1 deployment-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:51 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-query-6d484b9566-455nc to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:51 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc AddedInterface Add eth0 [10.129.2.31/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:51 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6d484b9566 SuccessfulCreate Created pod: my-jaeger-query-6d484b9566-455nc replicaset-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:51 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6d484b9566 to 1 deployment-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-collector-5b44b744d6-7qk4g.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-collector-5b44b744d6-7qk4g.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:31:52 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:00 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:00 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:00 +0000 UTC Normal Pod my-jaeger-query-6d484b9566-455nc.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:00 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6d484b9566 SuccessfulDelete Deleted pod: my-jaeger-query-6d484b9566-455nc replicaset-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:00 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-6d484b9566 to 0 from 1 deployment-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:01 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-query-5cdd864fc6-2hnmd to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:01 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5cdd864fc6 SuccessfulCreate Created pod: my-jaeger-query-5cdd864fc6-2hnmd replicaset-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:01 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5cdd864fc6 to 1 deployment-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:02 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd AddedInterface Add eth0 [10.129.2.32/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:02 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:02 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:02 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
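The Killing/SuccessfulDelete/ScalingReplicaSet events above show the operator rolling the query Deployment to a new ReplicaSet after the Jaeger CR changes (the old ReplicaSet is scaled to 0, a new one to 1). When scripting against this behavior, a standard way to block until such a rollout settles, offered here only as a generic kubectl idiom rather than something this suite logs, is:

    # Wait for the replaced query Deployment to become fully available again.
    kubectl rollout status deployment/my-jaeger-query -n "$NAMESPACE" --timeout=120s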
"registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:02 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:02 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:02 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:02 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:02 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:06 +0000 UTC Normal Pod 00-report-span-kcm9l Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/00-report-span-kcm9l to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:06 +0000 UTC Normal Pod 00-report-span-kcm9l AddedInterface Add eth0 [10.131.0.30/23] from ovn-kubernetes multus logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:06 +0000 UTC Normal Pod 00-report-span-kcm9l.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:06 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-kcm9l job-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:06 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:06 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:06 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:08 +0000 UTC Normal Pod 00-report-span-kcm9l.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.962s (1.962s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:09 +0000 UTC Normal Pod 00-report-span-kcm9l.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:09 +0000 UTC Normal Pod 00-report-span-kcm9l.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:31 +0000 UTC Normal Pod 00-check-indices-v9pmj Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/00-check-indices-v9pmj to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:31 +0000 UTC Normal Job.batch 00-check-indices SuccessfulCreate Created pod: 00-check-indices-v9pmj job-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:31 +0000 UTC Normal Job.batch 00-report-span Completed Job completed job-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:32 +0000 UTC Normal Pod 00-check-indices-v9pmj AddedInterface Add eth0 [10.131.0.31/23] from ovn-kubernetes multus logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:32 +0000 UTC Normal Pod 00-check-indices-v9pmj.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:36 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-5b44b744d6-7qk4g horizontal-pod-autoscaler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:39 +0000 UTC Normal Pod 00-check-indices-v9pmj.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 6.828s (6.828s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:39 +0000 UTC Normal Pod 00-check-indices-v9pmj.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:39 +0000 UTC Normal Pod 00-check-indices-v9pmj.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:42 +0000 UTC Normal Job.batch 00-check-indices Completed Job completed job-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:43 +0000 UTC Normal Pod 01-check-indices-x5lx9 Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/01-check-indices-x5lx9 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:43 +0000 UTC Normal Pod 01-check-indices-x5lx9 AddedInterface Add eth0 [10.131.0.32/23] from ovn-kubernetes multus logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:43 +0000 UTC Normal Pod 01-check-indices-x5lx9.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:43 +0000 UTC Normal Job.batch 01-check-indices SuccessfulCreate Created pod: 01-check-indices-x5lx9 job-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:44 +0000 UTC Normal Pod 01-check-indices-x5lx9.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 672ms (672ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:44 +0000 UTC Normal Pod 01-check-indices-x5lx9.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:44 +0000 UTC Normal Pod 01-check-indices-x5lx9.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:47 +0000 UTC Normal Job.batch 01-check-indices Completed Job completed job-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:49 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-cct6s Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-es-rollover-create-mapping-cct6s to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:49 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping SuccessfulCreate Created pod: my-jaeger-es-rollover-create-mapping-cct6s job-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:50 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-cct6s AddedInterface Add eth0 [10.131.0.33/23] from ovn-kubernetes multus logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:50 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-cct6s.spec.containers{my-jaeger-es-rollover-create-mapping} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:44283d1da2e4839c9a23cf2848ddc67d853a6d1fe532e04c642a6ecee08b25cb" kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector 
of Pod my-jaeger-collector-5b44b744d6-7qk4g horizontal-pod-autoscaler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod my-jaeger-collector-5b44b744d6-7qk4g horizontal-pod-autoscaler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:56 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-cct6s.spec.containers{my-jaeger-es-rollover-create-mapping} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:44283d1da2e4839c9a23cf2848ddc67d853a6d1fe532e04c642a6ecee08b25cb" in 5.951s (5.951s including waiting). Image size: 111742913 bytes. kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:56 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-cct6s.spec.containers{my-jaeger-es-rollover-create-mapping} Created Created container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:56 +0000 UTC Normal Pod my-jaeger-es-rollover-create-mapping-cct6s.spec.containers{my-jaeger-es-rollover-create-mapping} Started Started container my-jaeger-es-rollover-create-mapping kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:59 +0000 UTC Normal Pod my-jaeger-collector-5b44b744d6-7qk4g.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:59 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5b44b744d6 SuccessfulDelete Deleted pod: my-jaeger-collector-5b44b744d6-7qk4g replicaset-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:59 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled down replica set my-jaeger-collector-5b44b744d6 to 0 from 1 deployment-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:59 +0000 UTC Normal Job.batch my-jaeger-es-rollover-create-mapping Completed Job completed job-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:32:59 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled down replica set my-jaeger-query-5cdd864fc6 to 0 from 1 deployment-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-collector-56544488f9-5lc65 Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-collector-56544488f9-5lc65 to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-56544488f9 SuccessfulCreate Created pod: my-jaeger-collector-56544488f9-5lc65 replicaset-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-56544488f9 to 1 deployment-controller logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28885773-bq89m Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-es-lookback-28885773-bq89m to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC 
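The recurring HorizontalPodAutoscaler warnings above ("missing request for cpu/memory in container jaeger-collector") are what the HPA emits when the target container declares no resource requests, so there is nothing to compute utilization against. One illustrative way to quiet them would be to set requests on the collector in the Jaeger CR; this patch is a sketch only (the field path assumes the v1 Jaeger CR's per-component resources block, and the values are arbitrary), not something this test suite runs:

    # Hypothetical patch: give the collector CPU/memory requests so the HPA can compute utilization.
    kubectl patch jaeger my-jaeger -n "$NAMESPACE" --type=merge \
      -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'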
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28885773-bq89m AddedInterface Add eth0 [10.131.0.35/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28885773-bq89m.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:44283d1da2e4839c9a23cf2848ddc67d853a6d1fe532e04c642a6ecee08b25cb" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28885773 SuccessfulCreate Created pod: my-jaeger-es-lookback-28885773-bq89m job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28885773 cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28885773-52pcv Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-es-rollover-28885773-52pcv to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28885773-52pcv AddedInterface Add eth0 [10.131.0.34/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28885773-52pcv.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:44283d1da2e4839c9a23cf2848ddc67d853a6d1fe532e04c642a6ecee08b25cb" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28885773 SuccessfulCreate Created pod: my-jaeger-es-rollover-28885773-52pcv job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28885773 cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal Pod my-jaeger-query-5cdd864fc6-2hnmd.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:00 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-5cdd864fc6 SuccessfulDelete Deleted pod: my-jaeger-query-5cdd864fc6-2hnmd replicaset-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-collector-56544488f9-5lc65 AddedInterface Add eth0 [10.129.2.33/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-collector-56544488f9-5lc65.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-collector-56544488f9-5lc65.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-collector-56544488f9-5lc65.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-es-lookback-28885773-bq89m.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-es-lookback-28885773-bq89m.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-es-rollover-28885773-52pcv.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-es-rollover-28885773-52pcv.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-query-9fb595b89-2l9kv to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv AddedInterface Add eth0 [10.129.2.34/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-9fb595b89 SuccessfulCreate Created pod: my-jaeger-query-9fb595b89-2l9kv replicaset-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:01 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-9fb595b89 to 1 deployment-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:02 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:02 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:02 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:02 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:02 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:02 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:02 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:02 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:02 +0000 UTC Normal Pod my-jaeger-query-9fb595b89-2l9kv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:03 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28885773 Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:03 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28885773, condition: Complete cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:03 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28885773 Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:03 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28885773, condition: Complete cronjob-controller
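Once a rollover/lookback pass completes, the check-indices Jobs assert which jaeger-span indices and aliases exist. The asserts run as a Go binary inside the jaeger-asserts image, but a rough manual equivalent against the in-cluster Elasticsearch would be the _cat APIs below; the http://elasticsearch:9200 endpoint and the jaeger-span-read/jaeger-span-write alias names are the usual Jaeger rollover conventions, assumed here rather than taken from this log:

    # Hypothetical manual check: list rollover indices and their read/write aliases.
    curl -s "http://elasticsearch:9200/_cat/indices/jaeger-span-*?h=index"
    curl -s "http://elasticsearch:9200/_cat/aliases/jaeger-span-read,jaeger-span-write?h=alias,index"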
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:12 +0000 UTC Normal Pod 02-report-span-5gv2g Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/02-report-span-5gv2g to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:12 +0000 UTC Normal Job.batch 02-report-span SuccessfulCreate Created pod: 02-report-span-5gv2g job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:13 +0000 UTC Normal Pod 02-report-span-5gv2g AddedInterface Add eth0 [10.131.0.36/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:13 +0000 UTC Normal Pod 02-report-span-5gv2g.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:14 +0000 UTC Normal Pod 02-report-span-5gv2g.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 799ms (799ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:14 +0000 UTC Normal Pod 02-report-span-5gv2g.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:14 +0000 UTC Normal Pod 02-report-span-5gv2g.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:36 +0000 UTC Normal Job.batch 02-report-span Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:37 +0000 UTC Normal Pod 02-check-indices-xpvkr Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/02-check-indices-xpvkr to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:37 +0000 UTC Normal Job.batch 02-check-indices SuccessfulCreate Created pod: 02-check-indices-xpvkr job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:38 +0000 UTC Normal Pod 02-check-indices-xpvkr AddedInterface Add eth0 [10.131.0.37/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:38 +0000 UTC Normal Pod 02-check-indices-xpvkr.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:38 +0000 UTC Normal Pod 02-check-indices-xpvkr.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 521ms (521ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:38 +0000 UTC Normal Pod 02-check-indices-xpvkr.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:38 +0000 UTC Normal Pod 02-check-indices-xpvkr.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:41 +0000 UTC Normal Job.batch 02-check-indices Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:42 +0000 UTC Normal Pod 03-check-indices-k5vv4 Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/03-check-indices-k5vv4 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:42 +0000 UTC Normal Job.batch 03-check-indices SuccessfulCreate Created pod: 03-check-indices-k5vv4 job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:43 +0000 UTC Normal Pod 03-check-indices-k5vv4 AddedInterface Add eth0 [10.131.0.38/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:43 +0000 UTC Normal Pod 03-check-indices-k5vv4.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:43 +0000 UTC Normal Pod 03-check-indices-k5vv4.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 662ms (662ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:44 +0000 UTC Normal Pod 03-check-indices-k5vv4.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:44 +0000 UTC Normal Pod 03-check-indices-k5vv4.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:46 +0000 UTC Normal Job.batch 03-check-indices Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:47 +0000 UTC Normal Pod 04-check-indices-w67xn Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/04-check-indices-w67xn to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:47 +0000 UTC Normal Job.batch 04-check-indices SuccessfulCreate Created pod: 04-check-indices-w67xn job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:48 +0000 UTC Normal Pod 04-check-indices-w67xn AddedInterface Add eth0 [10.131.0.39/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:48 +0000 UTC Normal Pod 04-check-indices-w67xn.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:49 +0000 UTC Normal Pod 04-check-indices-w67xn.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 540ms (540ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:49 +0000 UTC Normal Pod 04-check-indices-w67xn.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:49 +0000 UTC Normal Pod 04-check-indices-w67xn.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:51 +0000 UTC Normal Job.batch 04-check-indices Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod my-jaeger-collector-56544488f9-5lc65 horizontal-pod-autoscaler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod my-jaeger-collector-56544488f9-5lc65 horizontal-pod-autoscaler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:33:51 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod my-jaeger-collector-56544488f9-5lc65 horizontal-pod-autoscaler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Pod 03-report-span-xml5z Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/03-report-span-xml5z to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Job.batch 03-report-span SuccessfulCreate Created pod: 03-report-span-xml5z job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28885774-pppc7 Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-es-lookback-28885774-pppc7 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28885774-pppc7 AddedInterface Add eth0 [10.131.0.40/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28885774-pppc7.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:44283d1da2e4839c9a23cf2848ddc67d853a6d1fe532e04c642a6ecee08b25cb" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28885774 SuccessfulCreate Created pod: my-jaeger-es-lookback-28885774-pppc7 job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28885774 cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28885774-d79t6 Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-es-rollover-28885774-d79t6 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28885774-d79t6 AddedInterface Add eth0 [10.131.0.41/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28885774-d79t6.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:44283d1da2e4839c9a23cf2848ddc67d853a6d1fe532e04c642a6ecee08b25cb" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28885774 SuccessfulCreate Created pod: my-jaeger-es-rollover-28885774-d79t6 job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28885774 cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:01 +0000 UTC Normal Pod 03-report-span-xml5z AddedInterface Add eth0 [10.131.0.42/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:01 +0000 UTC Normal Pod 03-report-span-xml5z.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:01 +0000 UTC Normal Pod my-jaeger-es-lookback-28885774-pppc7.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:01 +0000 UTC Normal Pod my-jaeger-es-lookback-28885774-pppc7.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:01 +0000 UTC Normal Pod my-jaeger-es-rollover-28885774-d79t6.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:01 +0000 UTC Normal Pod my-jaeger-es-rollover-28885774-d79t6.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:04 +0000 UTC Normal Pod 03-report-span-xml5z.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 3.895s (3.895s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:04 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28885774 Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:04 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28885774, condition: Complete cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:04 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28885774 Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:04 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28885774, condition: Complete cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:05 +0000 UTC Normal Pod 03-report-span-xml5z.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:05 +0000 UTC Normal Pod 03-report-span-xml5z.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:34:27 +0000 UTC Normal Job.batch 03-report-span Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28885775-z5hzf Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-es-lookback-28885775-z5hzf to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28885775-z5hzf AddedInterface Add eth0 [10.131.0.44/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal Pod my-jaeger-es-lookback-28885775-z5hzf.spec.containers{my-jaeger-es-lookback} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:44283d1da2e4839c9a23cf2848ddc67d853a6d1fe532e04c642a6ecee08b25cb" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28885775 SuccessfulCreate Created pod: my-jaeger-es-lookback-28885775-z5hzf job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SuccessfulCreate Created job my-jaeger-es-lookback-28885775 cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28885775-vgzxm Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/my-jaeger-es-rollover-28885775-vgzxm to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28885775-vgzxm AddedInterface Add eth0 [10.131.0.43/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal Pod my-jaeger-es-rollover-28885775-vgzxm.spec.containers{my-jaeger-es-rollover} Pulled Container image "registry.redhat.io/rhosdt/jaeger-es-rollover-rhel8@sha256:44283d1da2e4839c9a23cf2848ddc67d853a6d1fe532e04c642a6ecee08b25cb" already present on machine kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28885775 SuccessfulCreate Created pod: my-jaeger-es-rollover-28885775-vgzxm job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:00 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SuccessfulCreate Created job my-jaeger-es-rollover-28885775 cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:01 +0000 UTC Normal Pod my-jaeger-es-lookback-28885775-z5hzf.spec.containers{my-jaeger-es-lookback} Created Created container my-jaeger-es-lookback kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:01 +0000 UTC Normal Pod my-jaeger-es-lookback-28885775-z5hzf.spec.containers{my-jaeger-es-lookback} Started Started container my-jaeger-es-lookback kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:01 +0000 UTC Normal Pod my-jaeger-es-rollover-28885775-vgzxm.spec.containers{my-jaeger-es-rollover} Created Created container my-jaeger-es-rollover kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:01 +0000 UTC Normal Pod my-jaeger-es-rollover-28885775-vgzxm.spec.containers{my-jaeger-es-rollover} Started Started container my-jaeger-es-rollover kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:04 +0000 UTC Normal Job.batch my-jaeger-es-lookback-28885775 Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:04 +0000 UTC Normal CronJob.batch my-jaeger-es-lookback SawCompletedJob Saw completed job: my-jaeger-es-lookback-28885775, condition: Complete cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:04 +0000 UTC Normal Job.batch my-jaeger-es-rollover-28885775 Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:04 +0000 UTC Normal CronJob.batch my-jaeger-es-rollover SawCompletedJob Saw completed job: my-jaeger-es-rollover-28885775, condition: Complete cronjob-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:08 +0000 UTC Normal Pod 05-check-indices-sjtpq Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/05-check-indices-sjtpq to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:08 +0000 UTC Normal Pod 05-check-indices-sjtpq AddedInterface Add eth0 [10.131.0.45/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:08 +0000 UTC Normal Pod 05-check-indices-sjtpq.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:08 +0000 UTC Normal Job.batch 05-check-indices SuccessfulCreate Created pod: 05-check-indices-sjtpq job-controller
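The numeric suffixes on these jobs (my-jaeger-es-rollover-28885773 through -28885775) are the CronJob controller's scheduled-time stamps in minutes since the Unix epoch; 28885773 minutes is 2024-12-02 13:33:00 UTC, and the suffix increments by one per run, so both the lookback and rollover CronJobs here fire once per minute. To confirm the schedule directly (standard kubectl, not part of the logged suite):

    # Print the CronJob's cron expression, e.g. "*/1 * * * *" for every minute.
    kubectl get cronjob my-jaeger-es-rollover -n "$NAMESPACE" -o jsonpath='{.spec.schedule}'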
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:11 +0000 UTC Normal Pod 05-check-indices-sjtpq.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.394s (2.394s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:11 +0000 UTC Normal Pod 05-check-indices-sjtpq.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:11 +0000 UTC Normal Pod 05-check-indices-sjtpq.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:14 +0000 UTC Normal Job.batch 05-check-indices Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:15 +0000 UTC Normal Pod 06-check-indices-9kbq8 Binding Scheduled Successfully assigned kuttl-test-deciding-garfish/06-check-indices-9kbq8 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:15 +0000 UTC Normal Pod 06-check-indices-9kbq8 AddedInterface Add eth0 [10.131.0.46/23] from ovn-kubernetes multus
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:15 +0000 UTC Normal Pod 06-check-indices-9kbq8.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:15 +0000 UTC Normal Job.batch 06-check-indices SuccessfulCreate Created pod: 06-check-indices-9kbq8 job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:18 +0000 UTC Normal Pod 06-check-indices-9kbq8.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.091s (2.091s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:18 +0000 UTC Normal Pod 06-check-indices-9kbq8.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:18 +0000 UTC Normal Pod 06-check-indices-9kbq8.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:35:21 | es-rollover-autoprov | 2024-12-02 13:35:21 +0000 UTC Normal Job.batch 06-check-indices Completed Job completed job-controller
logger.go:42: 13:35:21 | es-rollover-autoprov | Deleting namespace: kuttl-test-deciding-garfish
=== CONT  kuttl/harness/es-increasing-replicas
logger.go:42: 13:35:29 | es-increasing-replicas | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:35:29 | es-increasing-replicas | Ignoring check-es-nodes.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:35:29 | es-increasing-replicas | Ignoring openshift-check-es-nodes.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:35:29 | es-increasing-replicas | Creating namespace: kuttl-test-normal-dragon
logger.go:42: 13:35:29 | es-increasing-replicas/1-install | starting test step 1-install
logger.go:42: 13:35:29 | es-increasing-replicas/1-install | Jaeger:kuttl-test-normal-dragon/simple-prod created
logger.go:42: 13:36:10 | es-increasing-replicas/1-install | test step completed 1-install
logger.go:42: 13:36:10 | es-increasing-replicas/2-install | starting test step 2-install
logger.go:42: 13:36:10 | es-increasing-replicas/2-install | Jaeger:kuttl-test-normal-dragon/simple-prod updated
logger.go:42: 13:36:36 | es-increasing-replicas/2-install | test step completed 2-install
logger.go:42: 13:36:36 | es-increasing-replicas/3-smoke-test | starting test step 3-smoke-test
logger.go:42: 13:36:36 | es-increasing-replicas/3-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 13:36:38 | es-increasing-replicas/3-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
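The "Ignoring ..." lines above come from kuttl's step-file filter: within a test directory, only files matching ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ (an index, a dash, a name, optionally .yaml) are treated as numbered steps, which is why README.md, the helper script, and the template are skipped. A quick hedged way to preview which files kuttl will pick up, translating that Go regexp into POSIX ERE for bash:

    # Hypothetical preview of kuttl step files (\d becomes [0-9] in bash's ERE).
    for f in *; do
      [[ $f =~ ^[0-9]+-[^.]+(\.yaml)?$ ]] && echo "step file: $f"
    done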
logger.go:42: 13:36:45 | es-increasing-replicas/3-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:36:45 | es-increasing-replicas/3-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:36:46 | es-increasing-replicas/3-smoke-test | job.batch/report-span created
logger.go:42: 13:36:46 | es-increasing-replicas/3-smoke-test | job.batch/check-span created
logger.go:42: 13:36:58 | es-increasing-replicas/3-smoke-test | test step completed 3-smoke-test
logger.go:42: 13:36:58 | es-increasing-replicas/4-install | starting test step 4-install
logger.go:42: 13:36:58 | es-increasing-replicas/4-install | Jaeger:kuttl-test-normal-dragon/simple-prod updated
logger.go:42: 13:36:58 | es-increasing-replicas/4-install | test step completed 4-install
logger.go:42: 13:36:58 | es-increasing-replicas/5-check-es-nodes | starting test step 5-check-es-nodes
logger.go:42: 13:36:58 | es-increasing-replicas/5-check-es-nodes | running command: [sh -c ./check-es-nodes.sh $NAMESPACE]
logger.go:42: 13:36:58 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 13:36:58 | es-increasing-replicas/5-check-es-nodes | false
logger.go:42: 13:36:58 | es-increasing-replicas/5-check-es-nodes | Error: no matches found
logger.go:42: 13:37:03 | es-increasing-replicas/5-check-es-nodes | Checking if the number of ES instances is the expected
logger.go:42: 13:37:03 | es-increasing-replicas/5-check-es-nodes | true
logger.go:42: 13:37:03 | es-increasing-replicas/5-check-es-nodes | test step completed 5-check-es-nodes
logger.go:42: 13:37:03 | es-increasing-replicas | es-increasing-replicas events from ns kuttl-test-normal-dragon:
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:40 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754665 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5 replicaset-controller
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5 Binding Scheduled Successfully assigned kuttl-test-normal-dragon/elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5 AddedInterface Add eth0 [10.128.2.34/23] from ovn-kubernetes multus
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:40 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754665 to 1 deployment-controller
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet
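The 5-check-es-nodes step above polls until the Elasticsearch node count matches what the updated Jaeger CR requests; the first probe prints "false" and "Error: no matches found", and the retry five seconds later prints "true". The actual logic lives in check-es-nodes.sh, which this log does not show, so the loop below is only a sketch of that retry pattern; the component=elasticsearch label selector and the expected count of 2 are assumptions for illustration:

    # Hedged sketch of a check-es-nodes.sh-style retry loop; $1 is the test namespace.
    expected=2
    until [ "$(kubectl get pods -n "$1" -l component=elasticsearch \
        --field-selector=status.phase=Running -o name | wc -l)" -eq "$expected" ]; do
      echo "Checking if the number of ES instances is the expected"
      sleep 5
    done
    echo "true"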
13:37:03 | es-increasing-replicas | 2024-12-02 13:35:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:41 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:35:56 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-1-f9b754z6bm5.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-cldn9 Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-collector-c8cfbcd49-cldn9 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-cldn9 AddedInterface Add eth0 [10.131.0.47/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-cldn9.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-cldn9.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-c8cfbcd49 SuccessfulCreate Created pod: simple-prod-collector-c8cfbcd49-cldn9 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-c8cfbcd49 to 1 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2 Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-query-55d545bfbc-tcsv2 to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Warning Pod simple-prod-query-55d545bfbc-tcsv2 FailedMount MountVolume.SetUp failed for volume "simple-prod-ui-oauth-proxy-tls" : secret "simple-prod-ui-oauth-proxy-tls" not found kubelet logger.go:42: 13:37:03 | 
es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Normal ReplicaSet.apps simple-prod-query-55d545bfbc SuccessfulCreate Created pod: simple-prod-query-55d545bfbc-tcsv2 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:07 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-55d545bfbc to 1 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-cldn9.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Warning Pod simple-prod-collector-c8cfbcd49-cldn9.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2 AddedInterface Add eth0 [10.129.2.35/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:08 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:09 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:12 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-p28zr Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-collector-c8cfbcd49-p28zr to ip-10-0-19-23.ec2.internal 
default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:12 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-c8cfbcd49 SuccessfulCreate Created pod: simple-prod-collector-c8cfbcd49-p28zr replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:12 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-c8cfbcd49 to 2 from 1 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:12 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-query-55d545bfbc-znjkn to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:12 +0000 UTC Normal ReplicaSet.apps simple-prod-query-55d545bfbc SuccessfulCreate Created pod: simple-prod-query-55d545bfbc-znjkn replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:12 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-55d545bfbc to 2 from 1 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:13 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-p28zr AddedInterface Add eth0 [10.129.2.36/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:13 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-p28zr.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:13 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-p28zr.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:13 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-p28zr.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:13 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn AddedInterface Add eth0 [10.131.0.48/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:13 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:24 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" in 10.911s (10.911s including waiting). Image size: 192936692 bytes. 
kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:24 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:24 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:24 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:28 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" in 3.823s (3.823s including waiting). Image size: 339954870 bytes. kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:28 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:28 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:28 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:34 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" in 6.062s (6.062s including waiting). Image size: 115697219 bytes. 
kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:34 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:34 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:39 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:39 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:39 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-tcsv2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:39 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:39 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:39 +0000 UTC Normal Pod simple-prod-query-55d545bfbc-znjkn.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:39 +0000 UTC Normal ReplicaSet.apps simple-prod-query-55d545bfbc SuccessfulDelete Deleted pod: simple-prod-query-55d545bfbc-tcsv2 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:39 +0000 UTC Normal ReplicaSet.apps simple-prod-query-55d545bfbc SuccessfulDelete Deleted pod: simple-prod-query-55d545bfbc-znjkn replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:39 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-55d545bfbc to 0 from 2 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:41 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2 Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-query-674c588fbb-bxnm2 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:41 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2 Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-query-674c588fbb-jt8s2 to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:41 +0000 UTC Normal ReplicaSet.apps simple-prod-query-674c588fbb SuccessfulCreate Created pod: simple-prod-query-674c588fbb-jt8s2 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:41 +0000 UTC Normal ReplicaSet.apps simple-prod-query-674c588fbb SuccessfulCreate Created pod: simple-prod-query-674c588fbb-bxnm2 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:41 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-674c588fbb to 2 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 
2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2 AddedInterface Add eth0 [10.131.0.49/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2 AddedInterface Add eth0 [10.129.2.37/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{oauth-proxy} Pulled Container image 
"registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:42 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:46 +0000 UTC Normal Pod check-span-b4t42 Binding Scheduled Successfully assigned kuttl-test-normal-dragon/check-span-b4t42 to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:46 +0000 UTC Normal Pod check-span-b4t42 AddedInterface Add eth0 [10.129.2.38/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:46 +0000 UTC Normal Pod check-span-b4t42.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:46 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-b4t42 job-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:46 +0000 UTC Normal Pod report-span-s7d8n Binding Scheduled Successfully assigned kuttl-test-normal-dragon/report-span-s7d8n to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:46 +0000 UTC Normal Pod report-span-s7d8n AddedInterface Add eth0 [10.131.0.50/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:46 +0000 UTC Normal Pod report-span-s7d8n.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:46 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-s7d8n job-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:47 +0000 UTC Normal Pod check-span-b4t42.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.037s (1.037s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:47 +0000 UTC Normal Pod report-span-s7d8n.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.015s (1.015s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:47 +0000 UTC Normal Pod report-span-s7d8n.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:47 +0000 UTC Normal Pod report-span-s7d8n.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:48 +0000 UTC Normal Pod check-span-b4t42.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:48 +0000 UTC Normal Pod check-span-b4t42.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:36:58 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb487f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46 Binding Scheduled Successfully assigned kuttl-test-normal-dragon/elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb487f to 1 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-cldn9.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Pod simple-prod-collector-c8cfbcd49-p28zr.spec.containers{jaeger-collector} Killing Stopping container jaeger-collector kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-c8cfbcd49 SuccessfulDelete Deleted pod: simple-prod-collector-c8cfbcd49-p28zr replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-c8cfbcd49 SuccessfulDelete Deleted pod: simple-prod-collector-c8cfbcd49-cldn9 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled down replica set simple-prod-collector-c8cfbcd49 to 0 from 2 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Pod simple-prod-query-674c588fbb-bxnm2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 
13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Pod simple-prod-query-674c588fbb-jt8s2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal ReplicaSet.apps simple-prod-query-674c588fbb SuccessfulDelete Deleted pod: simple-prod-query-674c588fbb-jt8s2 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal ReplicaSet.apps simple-prod-query-674c588fbb SuccessfulDelete Deleted pod: simple-prod-query-674c588fbb-bxnm2 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:00 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-674c588fbb to 0 from 2 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46 AddedInterface Add eth0 [10.131.0.51/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestnormaldragonsimpleprod-2-688cfb9sm46.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod simple-prod-collector-6bfbf8d4b6-gxkk8 Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-collector-6bfbf8d4b6-gxkk8 to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod simple-prod-collector-6bfbf8d4b6-tcj6g 
Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-collector-6bfbf8d4b6-tcj6g to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-6bfbf8d4b6 SuccessfulCreate Created pod: simple-prod-collector-6bfbf8d4b6-tcj6g replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-6bfbf8d4b6 SuccessfulCreate Created pod: simple-prod-collector-6bfbf8d4b6-gxkk8 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-6bfbf8d4b6 to 2 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-query-96bf6685c-2tlbr to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7 Binding Scheduled Successfully assigned kuttl-test-normal-dragon/simple-prod-query-96bf6685c-8zkx7 to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal ReplicaSet.apps simple-prod-query-96bf6685c SuccessfulCreate Created pod: simple-prod-query-96bf6685c-8zkx7 replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal ReplicaSet.apps simple-prod-query-96bf6685c SuccessfulCreate Created pod: simple-prod-query-96bf6685c-2tlbr replicaset-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:01 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-96bf6685c to 2 deployment-controller logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-collector-6bfbf8d4b6-gxkk8 AddedInterface Add eth0 [10.128.2.35/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-collector-6bfbf8d4b6-gxkk8.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-collector-6bfbf8d4b6-tcj6g AddedInterface Add eth0 [10.129.2.40/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-collector-6bfbf8d4b6-tcj6g.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-collector-6bfbf8d4b6-tcj6g.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-collector-6bfbf8d4b6-tcj6g.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:37:03 | 
es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr AddedInterface Add eth0 [10.131.0.52/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7 AddedInterface Add eth0 [10.129.2.39/23] from ovn-kubernetes multus logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod 
simple-prod-query-96bf6685c-8zkx7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:02 +0000 UTC Normal Pod simple-prod-query-96bf6685c-8zkx7.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:03 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:37:03 | es-increasing-replicas | 2024-12-02 13:37:03 +0000 UTC Normal Pod simple-prod-query-96bf6685c-2tlbr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:37:03 | es-increasing-replicas | Deleting namespace: kuttl-test-normal-dragon
=== CONT kuttl/harness/es-index-cleaner-autoprov
logger.go:42: 13:37:39 | es-index-cleaner-autoprov | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:37:39 | es-index-cleaner-autoprov | Creating namespace: kuttl-test-cute-marten
logger.go:42: 13:37:39 | es-index-cleaner-autoprov/1-install | starting test step 1-install
logger.go:42: 13:37:39 | es-index-cleaner-autoprov/1-install | Jaeger:kuttl-test-cute-marten/test-es-index-cleaner-with-prefix created
logger.go:42: 13:38:29 | es-index-cleaner-autoprov/1-install | test step completed 1-install
logger.go:42: 13:38:29 | es-index-cleaner-autoprov/2-report-spans | starting test step 2-report-spans
logger.go:42: 13:38:29 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE test-es-index-cleaner-with-prefix /dev/null]
logger.go:42: 13:38:30 | es-index-cleaner-autoprov/2-report-spans | Warning: resource jaegers/test-es-index-cleaner-with-prefix is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
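The 2-report-spans step that follows renders a report-span Job, applies it, and then waits for kuttl to match an assert file against the live Job. Judging from the failure reported below (".status.succeeded: key is missing from map", against an expected succeeded: 1), the assert is a partial Job object along these lines; a minimal sketch, with a hypothetical file name:

# 02-assert.yaml (name assumed): kuttl re-reads the live Job until it
# matches this partial object, or fails the step when the timeout expires.
apiVersion: batch/v1
kind: Job
metadata:
  name: 00-report-span
status:
  succeeded: 1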
logger.go:42: 13:38:37 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c DAYS=5 ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JOB_NUMBER=00 JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/report-spans.yaml.template -o report-span-00-job.yaml]
logger.go:42: 13:38:38 | es-index-cleaner-autoprov/2-report-spans | running command: [sh -c kubectl apply -f report-span-00-job.yaml -n $NAMESPACE]
logger.go:42: 13:38:38 | es-index-cleaner-autoprov/2-report-spans | job.batch/00-report-span created
logger.go:42: 13:48:38 | es-index-cleaner-autoprov/2-report-spans | test step failed 2-report-spans
case.go:364: failed in step 2-report-spans
case.go:366: --- Job:kuttl-test-cute-marten/00-report-span
+++ Job:kuttl-test-cute-marten/00-report-span
@@ -1,8 +1,162 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
+  annotations:
+    kubectl.kubernetes.io/last-applied-configuration: |
+      {"apiVersion":"batch/v1","kind":"Job","metadata":{"annotations":{},"name":"00-report-span","namespace":"kuttl-test-cute-marten"},"spec":{"backoffLimit":15,"template":{"spec":{"containers":[{"command":["./reporter","--days","5","--verbose"],"env":[{"name":"JAEGER_SERVICE_NAME","value":"smoke-test-service"},{"name":"OPERATION_NAME","value":"smoke-test-operation"},{"name":"JAEGER_ENDPOINT","value":"http://test-es-index-cleaner-with-prefix-collector-headless:14268/api/traces"},{"name":"JAEGER_QUERY","value":"https://test-es-index-cleaner-with-prefix-query/api/traces"},{"name":"SECRET_PATH","value":"/var/run/secrets/api-token/token"}],"image":"quay.io/rhn_support_ikanse/jaeger-asserts:latest","name":"asserts-container","volumeMounts":[{"mountPath":"/var/run/secrets/api-token","name":"token-api-volume"}]}],"restartPolicy":"OnFailure","volumes":[{"name":"token-api-volume","secret":{"secretName":"e2e-test"}}]}}}}
+  labels:
+    batch.kubernetes.io/controller-uid: 9bd9f840-5454-4679-b068-e2b8440bfbb9
+    batch.kubernetes.io/job-name: 00-report-span
+    controller-uid: 9bd9f840-5454-4679-b068-e2b8440bfbb9
+    job-name: 00-report-span
+  managedFields:
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:metadata:
+        f:annotations:
+          .: {}
+          f:kubectl.kubernetes.io/last-applied-configuration: {}
+      f:spec:
+        f:backoffLimit: {}
+        f:completionMode: {}
+        f:completions: {}
+        f:manualSelector: {}
+        f:parallelism: {}
+        f:podReplacementPolicy: {}
+        f:suspend: {}
+        f:template:
+          f:spec:
+            f:containers:
+              k:{"name":"asserts-container"}:
+                .: {}
+                f:command: {}
+                f:env:
+                  .: {}
+                  k:{"name":"JAEGER_ENDPOINT"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"JAEGER_QUERY"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"JAEGER_SERVICE_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"OPERATION_NAME"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                  k:{"name":"SECRET_PATH"}:
+                    .: {}
+                    f:name: {}
+                    f:value: {}
+                f:image: {}
+                f:imagePullPolicy: {}
+                f:name: {}
+                f:resources: {}
+                f:terminationMessagePath: {}
+                f:terminationMessagePolicy: {}
+                f:volumeMounts:
+                  .: {}
+                  k:{"mountPath":"/var/run/secrets/api-token"}:
+                    .: {}
+                    f:mountPath: {}
+                    f:name: {}
+            f:dnsPolicy: {}
+            f:restartPolicy: {}
+            f:schedulerName: {}
+            f:securityContext: {}
+            f:terminationGracePeriodSeconds: {}
+            f:volumes:
+              .: {}
+              k:{"name":"token-api-volume"}:
+                .: {}
+                f:name: {}
+                f:secret:
+                  .: {}
+                  f:defaultMode: {}
+                  f:secretName: {}
+    manager: kubectl-client-side-apply
+    operation: Update
+    time: "2024-12-02T13:38:38Z"
+  - apiVersion: batch/v1
+    fieldsType: FieldsV1
+    fieldsV1:
+      f:status:
+        f:active: {}
+        f:ready: {}
+        f:startTime: {}
+        f:terminating: {}
+        f:uncountedTerminatedPods: {}
+    manager: kube-controller-manager
+    operation: Update
+    subresource: status
+    time: "2024-12-02T13:38:41Z"
   name: 00-report-span
   namespace: kuttl-test-cute-marten
+spec:
+  backoffLimit: 15
+  completionMode: NonIndexed
+  completions: 1
+  manualSelector: false
+  parallelism: 1
+  podReplacementPolicy: TerminatingOrFailed
+  selector:
+    matchLabels:
+      batch.kubernetes.io/controller-uid: 9bd9f840-5454-4679-b068-e2b8440bfbb9
+  suspend: false
+  template:
+    metadata:
+      creationTimestamp: null
+      labels:
+        batch.kubernetes.io/controller-uid: 9bd9f840-5454-4679-b068-e2b8440bfbb9
+        batch.kubernetes.io/job-name: 00-report-span
+        controller-uid: 9bd9f840-5454-4679-b068-e2b8440bfbb9
+        job-name: 00-report-span
+    spec:
+      containers:
+      - command:
+        - ./reporter
+        - --days
+        - "5"
+        - --verbose
+        env:
+        - name: JAEGER_SERVICE_NAME
+          value: smoke-test-service
+        - name: OPERATION_NAME
+          value: smoke-test-operation
+        - name: JAEGER_ENDPOINT
+          value: http://test-es-index-cleaner-with-prefix-collector-headless:14268/api/traces
+        - name: JAEGER_QUERY
+          value: https://test-es-index-cleaner-with-prefix-query/api/traces
+        - name: SECRET_PATH
+          value: /var/run/secrets/api-token/token
+        image: quay.io/rhn_support_ikanse/jaeger-asserts:latest
+        imagePullPolicy: Always
+        name: asserts-container
+        resources: {}
+        terminationMessagePath: /dev/termination-log
+        terminationMessagePolicy: File
+        volumeMounts:
+        - mountPath: /var/run/secrets/api-token
+          name: token-api-volume
+      dnsPolicy: ClusterFirst
+      restartPolicy: OnFailure
+      schedulerName: default-scheduler
+      securityContext: {}
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: token-api-volume
+        secret:
+          defaultMode: 420
+          secretName: e2e-test
 status:
-  succeeded: 1
+  active: 1
+  ready: 1
+  startTime: "2024-12-02T13:38:38Z"
+  terminating: 0
+  uncountedTerminatedPods: {}
case.go:366: resource Job:kuttl-test-cute-marten/00-report-span: .status.succeeded: key is missing from map
logger.go:42: 13:48:38 | es-index-cleaner-autoprov | es-index-cleaner-autoprov events from ns kuttl-test-cute-marten:
logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:37:59 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r Binding Scheduled Successfully assigned kuttl-test-cute-marten/elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:37:59 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-778495f89c SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r replicaset-controller
logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:37:59 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-778495f89c to 1 deployment-controller
logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r AddedInterface Add eth0 [10.128.2.36/23] from ovn-kubernetes multus
logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:00 +0000 UTC Normal Pod
elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:00 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:15 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestcutemartentestesindexcleane-1-76bl9r.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:26 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms Binding Scheduled Successfully assigned kuttl-test-cute-marten/test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:26 +0000 UTC Warning Pod test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms FailedMount MountVolume.SetUp failed for volume "test-es-index-cleaner-with-prefix-collector-tls-config-volume" : secret "test-es-index-cleaner-with-prefix-collector-headless-tls" not found kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:26 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-collector-7f96d765c9 SuccessfulCreate Created pod: test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms replicaset-controller logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:26 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-collector ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-collector-7f96d765c9 to 1 deployment-controller logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:26 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls Binding Scheduled Successfully assigned kuttl-test-cute-marten/test-es-index-cleaner-with-prefix-query-9897c48d-rqlls to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:26 +0000 UTC Normal ReplicaSet.apps test-es-index-cleaner-with-prefix-query-9897c48d SuccessfulCreate Created pod: 
test-es-index-cleaner-with-prefix-query-9897c48d-rqlls replicaset-controller logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:26 +0000 UTC Normal Deployment.apps test-es-index-cleaner-with-prefix-query ScalingReplicaSet Scaled up replica set test-es-index-cleaner-with-prefix-query-9897c48d to 1 deployment-controller logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms AddedInterface Add eth0 [10.131.0.53/23] from ovn-kubernetes multus logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls AddedInterface Add eth0 [10.129.2.41/23] from ovn-kubernetes multus logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls.spec.containers{jaeger-agent} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:27 +0000 UTC Normal Pod test-es-index-cleaner-with-prefix-query-9897c48d-rqlls.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:38 +0000 UTC Normal Pod 00-report-span-77h6q Binding Scheduled Successfully assigned kuttl-test-cute-marten/00-report-span-77h6q to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:38 +0000 UTC Normal Job.batch 00-report-span SuccessfulCreate Created pod: 00-report-span-77h6q job-controller logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:39 +0000 UTC Normal Pod 00-report-span-77h6q AddedInterface Add eth0 [10.131.0.54/23] from ovn-kubernetes multus logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:39 +0000 UTC Normal Pod 00-report-span-77h6q.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:39 +0000 UTC Normal Pod 00-report-span-77h6q.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 721ms (721ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:39 +0000 UTC Normal Pod 00-report-span-77h6q.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:39 +0000 UTC Normal Pod 00-report-span-77h6q.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods 
might be unready) horizontal-pod-autoscaler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:38:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:39:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms horizontal-pod-autoscaler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:39:11 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms horizontal-pod-autoscaler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | 2024-12-02 13:39:26 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling test-es-index-cleaner-with-prefix-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod test-es-index-cleaner-with-prefix-collector-7f96d765c9-7tvms horizontal-pod-autoscaler logger.go:42: 13:48:38 | es-index-cleaner-autoprov | Deleting namespace: kuttl-test-cute-marten === CONT kuttl/harness/es-from-aio-to-production logger.go:42: 13:48:51 | es-from-aio-to-production | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 13:48:51 | es-from-aio-to-production | Creating namespace: kuttl-test-renewed-impala logger.go:42: 13:48:51 | es-from-aio-to-production/0-install | starting test step 0-install logger.go:42: 13:48:51 | es-from-aio-to-production/0-install | Jaeger:kuttl-test-renewed-impala/my-jaeger created logger.go:42: 13:48:57 | es-from-aio-to-production/0-install | test step completed 0-install logger.go:42: 13:48:57 | es-from-aio-to-production/1-smoke-test | starting test step 1-smoke-test logger.go:42: 13:48:57 | es-from-aio-to-production/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:48:59 | es-from-aio-to-production/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
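The repeated FailedGetResourceMetric warnings in the es-index-cleaner-autoprov events above are the HPA failing for two reasons in sequence: first metrics-server has no samples yet ("no metrics returned", "pods might be unready"), which is transient startup noise, and then the collector container genuinely has no resource requests ("missing request for cpu in container jaeger-collector"), so there is nothing to divide observed usage by. A minimal sketch of the missing piece, assuming the Jaeger CR carries the name prefix seen in the events (test-es-index-cleaner-with-prefix) and using hypothetical request values:

# Hypothetical values; any non-zero requests let the HPA compute
# cpu/memory utilization for the collector pod.
kubectl patch jaeger test-es-index-cleaner-with-prefix \
  -n kuttl-test-cute-marten --type merge \
  -p '{"spec":{"collector":{"resources":{"requests":{"cpu":"100m","memory":"128Mi"}}}}}'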
logger.go:42: 13:49:06 | es-from-aio-to-production/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:49:07 | es-from-aio-to-production/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:49:07 | es-from-aio-to-production/1-smoke-test | job.batch/report-span created logger.go:42: 13:49:07 | es-from-aio-to-production/1-smoke-test | job.batch/check-span created logger.go:42: 13:49:18 | es-from-aio-to-production/1-smoke-test | test step completed 1-smoke-test logger.go:42: 13:49:18 | es-from-aio-to-production/3-install | starting test step 3-install logger.go:42: 13:49:19 | es-from-aio-to-production/3-install | Jaeger:kuttl-test-renewed-impala/my-jaeger updated logger.go:42: 13:50:09 | es-from-aio-to-production/3-install | test step completed 3-install logger.go:42: 13:50:09 | es-from-aio-to-production/4-smoke-test | starting test step 4-smoke-test logger.go:42: 13:50:09 | es-from-aio-to-production/4-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 13:50:17 | es-from-aio-to-production/4-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:50:17 | es-from-aio-to-production/4-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:50:18 | es-from-aio-to-production/4-smoke-test | job.batch/report-span unchanged logger.go:42: 13:50:18 | es-from-aio-to-production/4-smoke-test | job.batch/check-span unchanged logger.go:42: 13:50:18 | es-from-aio-to-production/4-smoke-test | test step completed 4-smoke-test logger.go:42: 13:50:18 | es-from-aio-to-production | es-from-aio-to-production events from ns kuttl-test-renewed-impala: logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:55 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv Binding Scheduled Successfully assigned kuttl-test-renewed-impala/my-jaeger-7bbdcb66d9-b68lv to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:55 +0000 UTC Normal ReplicaSet.apps my-jaeger-7bbdcb66d9 SuccessfulCreate Created pod: my-jaeger-7bbdcb66d9-b68lv replicaset-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:55 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7bbdcb66d9 to 1 deployment-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:56 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv AddedInterface Add eth0 [10.128.2.38/23] from ovn-kubernetes multus logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:56 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already 
present on machine kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:56 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:56 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:56 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:56 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:48:56 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:03 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:03 +0000 UTC Normal Pod my-jaeger-7bbdcb66d9-b68lv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:03 +0000 UTC Normal ReplicaSet.apps my-jaeger-7bbdcb66d9 SuccessfulDelete Deleted pod: my-jaeger-7bbdcb66d9-b68lv replicaset-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:03 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-7bbdcb66d9 to 0 from 1 deployment-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:04 +0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7 Binding Scheduled Successfully assigned kuttl-test-renewed-impala/my-jaeger-5fb899589c-vx6v7 to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:04 +0000 UTC Normal ReplicaSet.apps my-jaeger-5fb899589c SuccessfulCreate Created pod: my-jaeger-5fb899589c-vx6v7 replicaset-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:04 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-5fb899589c to 1 deployment-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:05 +0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7 AddedInterface Add eth0 [10.128.2.39/23] from ovn-kubernetes multus logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:05 +0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:05 +0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:05 +0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:05 
+0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:05 +0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:05 +0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:07 +0000 UTC Normal Pod check-span-466sf Binding Scheduled Successfully assigned kuttl-test-renewed-impala/check-span-466sf to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:07 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-466sf job-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:07 +0000 UTC Normal Pod report-span-gl6w7 Binding Scheduled Successfully assigned kuttl-test-renewed-impala/report-span-gl6w7 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:07 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-gl6w7 job-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:08 +0000 UTC Normal Pod check-span-466sf AddedInterface Add eth0 [10.129.2.42/23] from ovn-kubernetes multus logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:08 +0000 UTC Normal Pod check-span-466sf.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:08 +0000 UTC Normal Pod report-span-gl6w7 AddedInterface Add eth0 [10.131.0.55/23] from ovn-kubernetes multus logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:08 +0000 UTC Normal Pod report-span-gl6w7.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:10 +0000 UTC Normal Pod report-span-gl6w7.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.346s (2.346s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:10 +0000 UTC Normal Pod report-span-gl6w7.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:10 +0000 UTC Normal Pod report-span-gl6w7.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:15 +0000 UTC Normal Pod check-span-466sf.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 6.728s (6.728s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:15 +0000 UTC Normal Pod check-span-466sf.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:15 +0000 UTC Normal Pod check-span-466sf.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:18 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:39 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c99784b9 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr replicaset-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr Binding Scheduled Successfully assigned kuttl-test-renewed-impala/elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr AddedInterface Add eth0 [10.128.2.40/23] from ovn-kubernetes multus logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:39 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:39 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c99784b9 to 1 deployment-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:40 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:49:43 +0000 UTC Normal Job.batch report-span Completed Job completed job-controller logger.go:42: 13:50:18 | 
es-from-aio-to-production | 2024-12-02 13:49:54 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestrenewedimpalamyjaeger-1-66c9978kmbzr.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Pod my-jaeger-collector-55d4f46b55-4mp9r Binding Scheduled Successfully assigned kuttl-test-renewed-impala/my-jaeger-collector-55d4f46b55-4mp9r to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Pod my-jaeger-collector-55d4f46b55-4mp9r AddedInterface Add eth0 [10.131.0.56/23] from ovn-kubernetes multus logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Pod my-jaeger-collector-55d4f46b55-4mp9r.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Pod my-jaeger-collector-55d4f46b55-4mp9r.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Pod my-jaeger-collector-55d4f46b55-4mp9r.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-55d4f46b55 SuccessfulCreate Created pod: my-jaeger-collector-55d4f46b55-4mp9r replicaset-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-55d4f46b55 to 1 deployment-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8 Binding Scheduled Successfully assigned kuttl-test-renewed-impala/my-jaeger-query-6b65ff56fb-f7jd8 to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8 AddedInterface Add eth0 [10.129.2.43/23] from ovn-kubernetes multus logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6b65ff56fb SuccessfulCreate Created pod: my-jaeger-query-6b65ff56fb-f7jd8 replicaset-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:06 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6b65ff56fb to 1 deployment-controller logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:07 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8.spec.containers{jaeger-query} Started Started container jaeger-query 
kubelet
logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:07 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:07 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:07 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:07 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:07 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:07 +0000 UTC Normal Pod my-jaeger-query-6b65ff56fb-f7jd8.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:09 +0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:50:18 | es-from-aio-to-production | 2024-12-02 13:50:09 +0000 UTC Normal Pod my-jaeger-5fb899589c-vx6v7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:50:18 | es-from-aio-to-production | Deleting namespace: kuttl-test-renewed-impala
=== CONT kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1304.48s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.90s)
        --- PASS: kuttl/harness/es-multiinstance (135.05s)
        --- PASS: kuttl/harness/es-simple-prod (6.21s)
        --- PASS: kuttl/harness/es-rollover-autoprov (260.98s)
        --- PASS: kuttl/harness/es-increasing-replicas (130.40s)
        --- FAIL: kuttl/harness/es-index-cleaner-autoprov (671.88s)
        --- PASS: kuttl/harness/es-from-aio-to-production (93.88s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name elasticsearch --report --output /logs/artifacts/elasticsearch.xml ./artifacts/kuttl-report.xml
time="2024-12-02T13:50:25Z" level=debug msg="Setting a new name for the test suites"
time="2024-12-02T13:50:25Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-12-02T13:50:25Z" level=debug msg="normalizing test case names"
time="2024-12-02T13:50:25Z" level=debug msg="elasticsearch/artifacts -> elasticsearch_artifacts"
time="2024-12-02T13:50:25Z" level=debug msg="elasticsearch/es-multiinstance -> elasticsearch_es_multiinstance"
time="2024-12-02T13:50:25Z" level=debug msg="elasticsearch/es-simple-prod -> elasticsearch_es_simple_prod"
time="2024-12-02T13:50:25Z" level=debug msg="elasticsearch/es-rollover-autoprov -> elasticsearch_es_rollover_autoprov"
time="2024-12-02T13:50:25Z" level=debug msg="elasticsearch/es-increasing-replicas -> elasticsearch_es_increasing_replicas"
time="2024-12-02T13:50:25Z" level=debug msg="elasticsearch/es-index-cleaner-autoprov -> elasticsearch_es_index_cleaner_autoprov"
time="2024-12-02T13:50:25Z" level=debug msg="elasticsearch/es-from-aio-to-production -> elasticsearch_es_from_aio_to_production"
+-----------------------------------------+--------+
|                  NAME                   | RESULT |
+-----------------------------------------+--------+
| elasticsearch_artifacts                 | passed |
| elasticsearch_es_multiinstance          | passed |
| elasticsearch_es_simple_prod            | passed |
| elasticsearch_es_rollover_autoprov      | passed |
| elasticsearch_es_increasing_replicas    | passed |
| elasticsearch_es_index_cleaner_autoprov | failed |
| elasticsearch_es_from_aio_to_production | passed |
+-----------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=examples
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/examples.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-examples
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
KAFKA_VERSION=3.6.0 \
SKIP_KAFKA=false \
VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \
./tests/e2e/examples/render.sh
+++ kubectl get clusterversion
++ output='NAME      VERSION                              AVAILABLE   PROGRESSING   SINCE   STATUS
version   4.18.0-0.nightly-2024-11-30-141716   True        False         41m     Cluster version is 4.18.0-0.nightly-2024-11-30-141716'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 41m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + '[' true '!=' true ']' + render_install_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./02-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. + mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. 
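The get_jaeger_strategy fragments traced above repeat for every example. Condensed into one place, the logic the trace walks through is roughly the following sketch (function and variable names taken from the trace; yq assumed on the PATH instead of /tmp/jaeger-tests/bin/yq):

get_jaeger_strategy() {
    local deployment_file=$1
    local strategy
    # An explicit production/streaming strategy on the CR wins
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
    if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
        echo "$strategy"
        return 0
    fi
    # Otherwise an agent strategy (e.g. DaemonSet) decides the layout
    strategy=$(yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
    if [ "$strategy" != null ]; then
        echo "$strategy"
        return 0
    fi
    # Neither field set: default to allInOne
    echo allInOne
}

The result then selects which assert template gets rendered: allinone-jaeger-assert.yaml.template for allInOne (as in the trace above), production-jaeger-assert.yaml.template for production.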
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
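Each render_install_example call above follows the same three-step pattern: render the upstream example through gomplate, then rewrite the storage hosts for the test cluster. A condensed sketch (names from the trace; gomplate and yq assumed on the PATH):

render_install_example() {
    local example_name=$1 test_step=$2
    local install_file=./${test_step}-install.yaml
    # Render the upstream example manifest
    gomplate -f "/tmp/jaeger-tests/examples/${example_name}.yaml" -o "$install_file"
    # Repoint storage from *.default.svc at the suite's own services
    sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' "$install_file"
    sed -i 's~cassandra.default.svc~cassandra~gi' "$install_file"
    # The Jaeger name is read back so later steps can build endpoint URLs
    JAEGER_NAME=$(yq e '. | select(.kind == "Jaeger").metadata.name' "$install_file")
    export JAEGER_NAME
}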
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
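The render_smoke_test calls above always take the secured branch on this cluster, because IS_OPENSHIFT=true makes is_secured true. A condensed sketch of the endpoint derivation (variable names from the trace; the unsecured defaults of http:// and port 16686 are an assumption, since only the secured branch appears in this log):

render_smoke_test() {
    local jaeger=$1 is_secured=$2 test_step=$3
    local protocol=http:// query_port=:16686   # assumed unsecured defaults
    local template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
    if [ "$is_secured" = true ]; then
        # Secured (OpenShift) queries go through the oauth-proxy route on 443
        protocol=https://
        query_port=:443
        template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
    fi
    export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
    export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
    export JAEGER_NAME=$jaeger
    gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
    unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
}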
+ mkdir -p examples-simple-prod-with-volumes + cd examples-simple-prod-with-volumes + example_name=simple-prod-with-volumes + render_install_example simple-prod-with-volumes 01 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod-with-volumes 02 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml + start_test examples-simplest + '[' 1 -ne 1 ']' + test_name=examples-simplest + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simplest' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simplest\e[0m' Rendering files for test examples-simplest + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes + '[' examples-simple-prod-with-volumes '!=' _build ']' + cd .. + mkdir -p examples-simplest + cd examples-simplest + example_name=simplest + render_install_example simplest 00 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 01 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger + '[' 1 -ne 1 ']' + test_name=examples-with-badger + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m' Rendering files for test examples-with-badger + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest + '[' examples-simplest '!=' _build ']' + cd .. + mkdir -p examples-with-badger + cd examples-with-badger + example_name=with-badger + render_install_example with-badger 00 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + JAEGER_NAME=with-badger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger 01 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + jaeger_name=with-badger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + export JAEGER_NAME=with-badger + JAEGER_NAME=with-badger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger-and-volume + '[' 1 -ne 1 ']' + test_name=examples-with-badger-and-volume + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger-and-volume' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m' Rendering files for test examples-with-badger-and-volume + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger + '[' examples-with-badger '!=' _build ']' + cd .. + mkdir -p examples-with-badger-and-volume + cd examples-with-badger-and-volume + example_name=with-badger-and-volume + render_install_example with-badger-and-volume 00 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + JAEGER_NAME=with-badger-and-volume + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger-and-volume 01 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + jaeger_name=with-badger-and-volume + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger-and-volume true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger-and-volume + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + export JAEGER_NAME=with-badger-and-volume + JAEGER_NAME=with-badger-and-volume + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-cassandra + '[' 1 -ne 1 ']' + test_name=examples-with-cassandra + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-cassandra' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m' Rendering files for test examples-with-cassandra + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume + '[' examples-with-badger-and-volume '!=' _build ']' + cd .. 
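The strategy resolution seen in each render above follows one pattern: `.spec.strategy` is read first, production and streaming are returned as-is, and anything else (including an explicit allInOne or a null) falls through to the `.spec.agent.strategy` check. A sketch reconstructed from the traces; the DaemonSet branch is inferred from the caller's later `[ ... = DaemonSet ]` comparison and may differ from the real helper:

    get_jaeger_strategy() {
        local deployment_file=$1 strategy
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
        if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
            echo "$strategy"
            return 0
        fi
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
        if [ "$strategy" = DaemonSet ]; then
            echo DaemonSet    # assumption: mirrors the caller's DaemonSet comparison
        else
            echo allInOne     # null agent strategy, as in the traces above
        fi
    }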
+ mkdir -p examples-with-cassandra + cd examples-with-cassandra + example_name=with-cassandra + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-cassandra 01 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + JAEGER_NAME=with-cassandra + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-cassandra 02 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + jaeger_name=with-cassandra + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-cassandra true 02 + '[' 3 -ne 3 ']' + jaeger=with-cassandra + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + export JAEGER_NAME=with-cassandra + JAEGER_NAME=with-cassandra + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-sampling + '[' 1 -ne 1 ']' + test_name=examples-with-sampling + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-sampling' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m' Rendering files for test examples-with-sampling + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra + '[' examples-with-cassandra '!=' _build ']' + cd .. + mkdir -p examples-with-sampling + cd examples-with-sampling + export example_name=with-sampling + example_name=with-sampling + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-sampling 01 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + JAEGER_NAME=with-sampling + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-sampling 02 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + jaeger_name=with-sampling + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-sampling true 02 + '[' 3 -ne 3 ']' + jaeger=with-sampling + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + export JAEGER_NAME=with-sampling + JAEGER_NAME=with-sampling + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling + '[' examples-with-sampling '!=' _build ']' + cd .. 
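Every example rendered so far also shares one install step; condensed into a sketch, reconstructed from the traces, with the caller's assert-template selection folded in and the get_jaeger_name/get_jaeger_strategy helpers named as they appear in the traces:

    render_install_example() {
        local example_name=$1 test_step=$2
        local install_file=./${test_step}-install.yaml
        /tmp/jaeger-tests/bin/gomplate -f "/tmp/jaeger-tests/examples/${example_name}.yaml" -o "$install_file"
        # The upstream examples point at storage in the 'default' namespace;
        # rewrite the URLs so they resolve inside the per-test kuttl namespace.
        sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' "$install_file"
        sed -i 's~cassandra.default.svc~cassandra~gi' "$install_file"
        export JAEGER_NAME
        JAEGER_NAME=$(get_jaeger_name "$install_file")
        local jaeger_strategy template
        jaeger_strategy=$(get_jaeger_strategy "$install_file")
        # Assert-template selection as observed in this run: production specs
        # get the production template, the rest get the all-in-one one.
        case "$jaeger_strategy" in
            production) template=production-jaeger-assert.yaml.template ;;
            *)          template=allinone-jaeger-assert.yaml.template ;;
        esac
        /tmp/jaeger-tests/bin/gomplate -f "/tmp/jaeger-tests/tests/templates/$template" -o "./${test_step}-assert.yaml"
    }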
+ mkdir -p examples-agent-as-daemonset + cd examples-agent-as-daemonset + '[' true = true ']' + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml + '[' true = true ']' + start_test examples-openshift-with-htpasswd + '[' 1 -ne 1 ']' + test_name=examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-with-htpasswd' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m' Rendering files for test examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset + '[' examples-agent-as-daemonset '!=' _build ']' + cd .. + mkdir -p examples-openshift-with-htpasswd + cd examples-openshift-with-htpasswd + export JAEGER_NAME=with-htpasswd + JAEGER_NAME=with-htpasswd + export JAEGER_USERNAME=awesomeuser + JAEGER_USERNAME=awesomeuser + export JAEGER_PASSWORD=awesomepassword + JAEGER_PASSWORD=awesomepassword + export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' + JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ base64 + SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg== + /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + INSECURE=true + JAEGER_USERNAME= + JAEGER_PASSWORD= + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml + JAEGER_USERNAME=wronguser + JAEGER_PASSWORD=wrongpassword + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml + skip_test examples-agent-as-daemonset 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=examples-agent-as-daemonset + message='This test is flaky in Prow 
CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-with-htpasswd + '[' examples-openshift-with-htpasswd '!=' _build ']' + cd .. + rm -rf examples-agent-as-daemonset + warning 'examples-agent-as-daemonset: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: examples-agent-as-daemonset: This test is flaky in Prow CI\e[0m' WAR: examples-agent-as-daemonset: This test is flaky in Prow CI + skip_test examples-with-badger-and-volume 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=examples-with-badger-and-volume + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + rm -rf examples-with-badger-and-volume + warning 'examples-with-badger-and-volume: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: examples-with-badger-and-volume: This test is flaky in Prow CI\e[0m' WAR: examples-with-badger-and-volume: This test is flaky in Prow CI + skip_test examples-collector-with-priority-class 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=examples-collector-with-priority-class + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + rm -rf examples-collector-with-priority-class + warning 'examples-collector-with-priority-class: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: examples-collector-with-priority-class: This test is flaky in Prow CI\e[0m' WAR: examples-collector-with-priority-class: This test is flaky in Prow CI make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running examples E2E tests' Running examples E2E tests + cd tests/e2e/examples/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1113221797 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 12 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== RUN kuttl/harness/examples-agent-with-priority-class
=== PAUSE kuttl/harness/examples-agent-with-priority-class
=== RUN kuttl/harness/examples-all-in-one-with-options
=== PAUSE kuttl/harness/examples-all-in-one-with-options
=== RUN kuttl/harness/examples-business-application-injected-sidecar
=== PAUSE kuttl/harness/examples-business-application-injected-sidecar
=== RUN kuttl/harness/examples-openshift-with-htpasswd
=== PAUSE kuttl/harness/examples-openshift-with-htpasswd
=== RUN kuttl/harness/examples-service-types
=== PAUSE kuttl/harness/examples-service-types
=== RUN kuttl/harness/examples-simple-prod
=== PAUSE kuttl/harness/examples-simple-prod
=== RUN kuttl/harness/examples-simple-prod-with-volumes
=== PAUSE kuttl/harness/examples-simple-prod-with-volumes
=== RUN kuttl/harness/examples-simplest
=== PAUSE kuttl/harness/examples-simplest
=== RUN kuttl/harness/examples-with-badger
=== PAUSE kuttl/harness/examples-with-badger
=== RUN kuttl/harness/examples-with-cassandra
=== PAUSE kuttl/harness/examples-with-cassandra
=== RUN kuttl/harness/examples-with-sampling
=== PAUSE kuttl/harness/examples-with-sampling
=== CONT kuttl/harness/artifacts
logger.go:42: 13:50:56 | artifacts | Creating namespace: kuttl-test-sharing-jaybird
logger.go:42: 13:50:56 | artifacts | artifacts events from ns kuttl-test-sharing-jaybird:
logger.go:42: 13:50:56 | artifacts | Deleting namespace: kuttl-test-sharing-jaybird
=== CONT kuttl/harness/examples-simple-prod
logger.go:42: 13:51:02 | examples-simple-prod | Creating namespace: kuttl-test-relevant-longhorn
logger.go:42: 13:51:02 | examples-simple-prod/1-install | starting test step 1-install
logger.go:42: 13:51:02 | examples-simple-prod/1-install | Jaeger:kuttl-test-relevant-longhorn/simple-prod created
logger.go:42: 13:51:52 | examples-simple-prod/1-install | test step completed 1-install
logger.go:42: 13:51:52 | examples-simple-prod/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 13:51:52 | examples-simple-prod/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 13:51:54 | examples-simple-prod/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
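Condensed, the 2-smoke-test step boils down to three commands, with values copied verbatim from the surrounding records; the report-span/check-span Job definitions live in the template and are not shown in this log:

    # run inside the per-test namespace by kuttl
    SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null
    ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest \
      JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 \
      JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 \
      MOUNT_SECRET=e2e-test \
      /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml
    # applies the rendered report-span and check-span Jobs; the step
    # passes once the check-span Job completes
    kubectl apply -f smoke-test-job.yaml -n $NAMESPACE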
logger.go:42: 13:52:01 | examples-simple-prod/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 13:52:01 | examples-simple-prod/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 13:52:02 | examples-simple-prod/2-smoke-test | job.batch/report-span created logger.go:42: 13:52:02 | examples-simple-prod/2-smoke-test | job.batch/check-span created logger.go:42: 13:52:16 | examples-simple-prod/2-smoke-test | test step completed 2-smoke-test logger.go:42: 13:52:16 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-relevant-longhorn: logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:20 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7c9bb4bc SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn replicaset-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:20 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn Binding Scheduled Successfully assigned kuttl-test-relevant-longhorn/elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:20 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7c9bb4bc to 1 deployment-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn AddedInterface Add eth0 [10.128.2.41/23] from ovn-kubernetes multus logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:21 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn.spec.containers{proxy} Created Created container proxy kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:21 
+0000 UTC Normal Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn.spec.containers{proxy} Started Started container proxy kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:31 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Elasticsearch node is not ready to accept HTTP requests yet [response code: 000] kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:36 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestrelevantlonghornsimpleprod-1-f7nbffn.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:48 +0000 UTC Normal Pod simple-prod-collector-578c98d86c-7rvqd Binding Scheduled Successfully assigned kuttl-test-relevant-longhorn/simple-prod-collector-578c98d86c-7rvqd to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:48 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-578c98d86c SuccessfulCreate Created pod: simple-prod-collector-578c98d86c-7rvqd replicaset-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:48 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-578c98d86c to 1 deployment-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:48 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc Binding Scheduled Successfully assigned kuttl-test-relevant-longhorn/simple-prod-query-79f86b44c8-d2gpc to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:48 +0000 UTC Normal ReplicaSet.apps simple-prod-query-79f86b44c8 SuccessfulCreate Created pod: simple-prod-query-79f86b44c8-d2gpc replicaset-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:48 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-79f86b44c8 to 1 deployment-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-collector-578c98d86c-7rvqd AddedInterface Add eth0 [10.131.0.57/23] from ovn-kubernetes multus logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-collector-578c98d86c-7rvqd.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-collector-578c98d86c-7rvqd.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-collector-578c98d86c-7rvqd.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc AddedInterface Add eth0 [10.129.2.44/23] from ovn-kubernetes multus logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{jaeger-query} Pulled Container image 
"registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:49 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:56 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:56 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:56 +0000 UTC Normal Pod simple-prod-query-79f86b44c8-d2gpc.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:56 +0000 UTC Normal ReplicaSet.apps simple-prod-query-79f86b44c8 SuccessfulDelete Deleted pod: simple-prod-query-79f86b44c8-d2gpc replicaset-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:56 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-79f86b44c8 to 0 from 1 deployment-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:57 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k Binding Scheduled Successfully assigned kuttl-test-relevant-longhorn/simple-prod-query-bd595984f-rrj8k to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:57 +0000 UTC Normal ReplicaSet.apps simple-prod-query-bd595984f SuccessfulCreate Created pod: simple-prod-query-bd595984f-rrj8k replicaset-controller 
logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:57 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-bd595984f to 1 deployment-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:58 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k AddedInterface Add eth0 [10.129.2.45/23] from ovn-kubernetes multus logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:58 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:58 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:58 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:58 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:58 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:59 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:59 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:59 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:51:59 +0000 UTC Normal Pod simple-prod-query-bd595984f-rrj8k.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:02 +0000 UTC Normal Pod check-span-tmmgd Binding Scheduled Successfully assigned kuttl-test-relevant-longhorn/check-span-tmmgd to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:02 +0000 UTC Normal Pod check-span-tmmgd AddedInterface Add eth0 [10.131.0.59/23] from ovn-kubernetes multus logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:02 +0000 UTC Normal Pod check-span-tmmgd.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:02 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-tmmgd job-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:02 +0000 UTC Normal Pod report-span-zdtsd Binding Scheduled Successfully assigned 
kuttl-test-relevant-longhorn/report-span-zdtsd to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:02 +0000 UTC Normal Pod report-span-zdtsd AddedInterface Add eth0 [10.131.0.58/23] from ovn-kubernetes multus logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:02 +0000 UTC Normal Pod report-span-zdtsd.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:02 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-zdtsd job-controller logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:03 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:04 +0000 UTC Normal Pod report-span-zdtsd.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.94s (1.94s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:04 +0000 UTC Normal Pod report-span-zdtsd.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:04 +0000 UTC Normal Pod report-span-zdtsd.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:05 +0000 UTC Normal Pod check-span-tmmgd.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.132s (2.132s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:05 +0000 UTC Normal Pod check-span-tmmgd.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:05 +0000 UTC Normal Pod check-span-tmmgd.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:52:16 | examples-simple-prod | 2024-12-02 13:52:15 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:52:16 | examples-simple-prod | Deleting namespace: kuttl-test-relevant-longhorn === CONT kuttl/harness/examples-with-sampling logger.go:42: 13:52:29 | examples-with-sampling | Creating namespace: kuttl-test-subtle-bird logger.go:42: 13:52:29 | examples-with-sampling/0-install | starting test step 0-install logger.go:42: 13:52:29 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 13:52:29 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 13:52:29 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-subtle-bird logger.go:42: 13:52:29 | examples-with-sampling/0-install | kubectl create namespace kuttl-test-subtle-bird 2>&1 | grep -v "already exists" || true logger.go:42: 13:52:30 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-subtle-bird 2>&1 | grep -v "already exists" || true logger.go:42: 13:52:30 | examples-with-sampling/0-install | service/cassandra created logger.go:42: 13:52:30 | examples-with-sampling/0-install | statefulset.apps/cassandra created logger.go:42: 13:52:30 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 13:52:42 | examples-with-sampling/0-install | test step completed 0-install logger.go:42: 13:52:42 | examples-with-sampling/1-install | starting test step 1-install logger.go:42: 13:52:42 | examples-with-sampling/1-install | Jaeger:kuttl-test-subtle-bird/with-sampling created logger.go:42: 13:52:48 | examples-with-sampling/1-install | test step completed 1-install logger.go:42: 13:52:48 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test logger.go:42: 13:52:48 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null] logger.go:42: 13:52:50 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
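The 0-install storage step used by the Cassandra-backed examples reduces to the following, with commands copied from the step output above; the contents of cassandra.yml are not shown in this log:

    cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE
    # which the Makefile expands to, per the output above:
    kubectl create namespace $NAMESPACE 2>&1 | grep -v "already exists" || true
    kubectl create -f ./tests/cassandra.yml --namespace $NAMESPACE 2>&1 | grep -v "already exists" || true
    # creates service/cassandra and statefulset.apps/cassandra; the test
    # step completes once the StatefulSet pods are running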
logger.go:42: 13:52:58 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:52:58 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:52:59 | examples-with-sampling/2-smoke-test | job.batch/report-span created
logger.go:42: 13:52:59 | examples-with-sampling/2-smoke-test | job.batch/check-span created
logger.go:42: 13:53:07 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 13:53:07 | examples-with-sampling/3- | starting test step 3-
logger.go:42: 13:53:07 | examples-with-sampling/3- | test step completed 3-
logger.go:42: 13:53:07 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-subtle-bird:
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:30 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-subtle-bird/cassandra-0 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:30 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:31 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.128.2.42/23] from ovn-kubernetes multus
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:31 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:35 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 3.757s (3.757s including waiting). Image size: 309817322 bytes. kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:35 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:35 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:35 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-subtle-bird/cassandra-1 to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:35 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:36 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.129.2.46/23] from ovn-kubernetes multus
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:36 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:40 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 3.811s (3.811s including waiting). Image size: 309817322 bytes. kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:40 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:40 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:46 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk Binding Scheduled Successfully assigned kuttl-test-subtle-bird/with-sampling-94cc567c-zwhnk to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:46 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk AddedInterface Add eth0 [10.128.2.43/23] from ovn-kubernetes multus
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:46 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:46 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:46 +0000 UTC Normal ReplicaSet.apps with-sampling-94cc567c SuccessfulCreate Created pod: with-sampling-94cc567c-zwhnk replicaset-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:46 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-94cc567c to 1 deployment-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:47 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:47 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:47 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:47 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:51 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:51 +0000 UTC Normal Pod with-sampling-94cc567c-zwhnk.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:51 +0000 UTC Normal ReplicaSet.apps with-sampling-94cc567c SuccessfulDelete Deleted pod: with-sampling-94cc567c-zwhnk replicaset-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:51 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set with-sampling-94cc567c to 0 from 1 deployment-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:52 +0000 UTC Normal Pod with-sampling-6c8fddbd5-q7j98 Binding Scheduled Successfully assigned kuttl-test-subtle-bird/with-sampling-6c8fddbd5-q7j98 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:52 +0000 UTC Normal ReplicaSet.apps with-sampling-6c8fddbd5 SuccessfulCreate Created pod: with-sampling-6c8fddbd5-q7j98 replicaset-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:52 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-6c8fddbd5 to 1 deployment-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:53 +0000 UTC Normal Pod with-sampling-6c8fddbd5-q7j98 AddedInterface Add eth0 [10.131.0.60/23] from ovn-kubernetes multus
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:53 +0000 UTC Normal Pod with-sampling-6c8fddbd5-q7j98.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:55 +0000 UTC Normal Pod with-sampling-6c8fddbd5-q7j98.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" in 2.295s (2.295s including waiting). Image size: 147411949 bytes. kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:56 +0000 UTC Normal Pod with-sampling-6c8fddbd5-q7j98.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:56 +0000 UTC Normal Pod with-sampling-6c8fddbd5-q7j98.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:56 +0000 UTC Normal Pod with-sampling-6c8fddbd5-q7j98.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:56 +0000 UTC Normal Pod with-sampling-6c8fddbd5-q7j98.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:56 +0000 UTC Normal Pod with-sampling-6c8fddbd5-q7j98.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:59 +0000 UTC Normal Pod check-span-tpr56 Binding Scheduled Successfully assigned kuttl-test-subtle-bird/check-span-tpr56 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:59 +0000 UTC Normal Pod check-span-tpr56 AddedInterface Add eth0 [10.128.2.45/23] from ovn-kubernetes multus
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:59 +0000 UTC Normal Pod check-span-tpr56.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:59 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-tpr56 job-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:59 +0000 UTC Normal Pod report-span-9jc8v Binding Scheduled Successfully assigned kuttl-test-subtle-bird/report-span-9jc8v to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:59 +0000 UTC Normal Pod report-span-9jc8v AddedInterface Add eth0 [10.128.2.44/23] from ovn-kubernetes multus
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:59 +0000 UTC Normal Pod report-span-9jc8v.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:52:59 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-9jc8v job-controller
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:53:01 +0000 UTC Normal Pod report-span-9jc8v.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.167s (2.167s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:53:02 +0000 UTC Normal Pod report-span-9jc8v.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:53:02 +0000 UTC Normal Pod report-span-9jc8v.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:53:04 +0000 UTC Normal Pod check-span-tpr56.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 4.397s (4.397s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:53:04 +0000 UTC Normal Pod check-span-tpr56.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:53:04 +0000 UTC Normal Pod check-span-tpr56.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:53:07 | examples-with-sampling | 2024-12-02 13:53:06 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:53:07 | examples-with-sampling | Deleting namespace: kuttl-test-subtle-bird
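
The 2-smoke-test step above is the pattern every example test in this suite repeats: gomplate renders a Job manifest from smoke-test.yaml.template, kubectl applies it, and the paired report-span/check-span Jobs pass once the reported trace becomes queryable. A minimal sketch of running that step by hand, assuming the same template variables the log shows:

ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest \
JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 \
JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 \
MOUNT_SECRET=e2e-test \
/tmp/jaeger-tests/bin/gomplate \
  -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template \
  -o smoke-test-job.yaml
kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
# report-span posts a test span to the collector endpoint; check-span polls the
# query endpoint until that span comes back, so waiting on check-span alone is
# enough to declare the smoke test passed.
kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=5m
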
=== CONT  kuttl/harness/examples-with-cassandra
logger.go:42: 13:55:44 | examples-with-cassandra | Creating namespace: kuttl-test-proud-lion
logger.go:42: 13:55:44 | examples-with-cassandra/0-install | starting test step 0-install
logger.go:42: 13:55:44 | examples-with-cassandra/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE]
logger.go:42: 13:55:44 | examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests'
logger.go:42: 13:55:44 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-proud-lion
logger.go:42: 13:55:44 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-proud-lion 2>&1 | grep -v "already exists" || true
logger.go:42: 13:55:45 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-proud-lion 2>&1 | grep -v "already exists" || true
logger.go:42: 13:55:45 | examples-with-cassandra/0-install | service/cassandra created
logger.go:42: 13:55:45 | examples-with-cassandra/0-install | statefulset.apps/cassandra created
logger.go:42: 13:55:45 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests'
logger.go:42: 13:55:49 | examples-with-cassandra/0-install | test step completed 0-install
logger.go:42: 13:55:49 | examples-with-cassandra/1-install | starting test step 1-install
logger.go:42: 13:55:49 | examples-with-cassandra/1-install | Jaeger:kuttl-test-proud-lion/with-cassandra created
logger.go:42: 13:56:06 | examples-with-cassandra/1-install | test step completed 1-install
logger.go:42: 13:56:06 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 13:56:06 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null]
logger.go:42: 13:56:07 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:56:14 | examples-with-cassandra/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:56:15 | examples-with-cassandra/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:56:15 | examples-with-cassandra/2-smoke-test | job.batch/report-span created
logger.go:42: 13:56:15 | examples-with-cassandra/2-smoke-test | job.batch/check-span created
logger.go:42: 13:56:28 | examples-with-cassandra/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 13:56:28 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-proud-lion:
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:45 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-proud-lion/cassandra-0 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:45 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:46 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.128.2.46/23] from ovn-kubernetes multus
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:46 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:46 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:46 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:47 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-proud-lion/cassandra-1 to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:47 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.129.2.47/23] from ovn-kubernetes multus
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:47 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:47 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:47 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:47 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:52 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-wmx2t Binding Scheduled Successfully assigned kuttl-test-proud-lion/with-cassandra-cassandra-schema-job-wmx2t to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:52 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-wmx2t job-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:53 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-wmx2t AddedInterface Add eth0 [10.131.0.61/23] from ovn-kubernetes multus
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:53 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-wmx2t.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.57.0" kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:58 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-wmx2t.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.57.0" in 4.453s (4.453s including waiting). Image size: 381891613 bytes. kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:58 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-wmx2t.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:55:58 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-wmx2t.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:03 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:04 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f Binding Scheduled Successfully assigned kuttl-test-proud-lion/with-cassandra-6d58957d4b-g954f to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:04 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f AddedInterface Add eth0 [10.131.0.62/23] from ovn-kubernetes multus
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:04 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:04 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:04 +0000 UTC Normal ReplicaSet.apps with-cassandra-6d58957d4b SuccessfulCreate Created pod: with-cassandra-6d58957d4b-g954f replicaset-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:04 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-6d58957d4b to 1 deployment-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:05 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:05 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:05 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:05 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:10 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-6d58957d4b to 0 from 1 deployment-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:11 +0000 UTC Normal Pod with-cassandra-5b54c6d4f4-2lj42 Binding Scheduled Successfully assigned kuttl-test-proud-lion/with-cassandra-5b54c6d4f4-2lj42 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:11 +0000 UTC Warning Pod with-cassandra-5b54c6d4f4-2lj42 ErrorUpdatingResource addLogicalPort failed for kuttl-test-proud-lion/with-cassandra-5b54c6d4f4-2lj42: failed to update pod kuttl-test-proud-lion/with-cassandra-5b54c6d4f4-2lj42: Operation cannot be fulfilled on pods "with-cassandra-5b54c6d4f4-2lj42": the object has been modified; please apply your changes to the latest version and try again controlplane
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:11 +0000 UTC Normal ReplicaSet.apps with-cassandra-5b54c6d4f4 SuccessfulCreate Created pod: with-cassandra-5b54c6d4f4-2lj42 replicaset-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:11 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:11 +0000 UTC Normal Pod with-cassandra-6d58957d4b-g954f.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:11 +0000 UTC Normal ReplicaSet.apps with-cassandra-6d58957d4b SuccessfulDelete Deleted pod: with-cassandra-6d58957d4b-g954f replicaset-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:11 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-5b54c6d4f4 to 1 deployment-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:12 +0000 UTC Normal Pod with-cassandra-5b54c6d4f4-2lj42 AddedInterface Add eth0 [10.131.0.64/23] from ovn-kubernetes multus
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:12 +0000 UTC Normal Pod with-cassandra-5b54c6d4f4-2lj42.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:12 +0000 UTC Normal Pod with-cassandra-5b54c6d4f4-2lj42.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:12 +0000 UTC Normal Pod with-cassandra-5b54c6d4f4-2lj42.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:12 +0000 UTC Normal Pod with-cassandra-5b54c6d4f4-2lj42.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:12 +0000 UTC Normal Pod with-cassandra-5b54c6d4f4-2lj42.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:12 +0000 UTC Normal Pod with-cassandra-5b54c6d4f4-2lj42.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:15 +0000 UTC Normal Pod check-span-4pl8p Binding Scheduled Successfully assigned kuttl-test-proud-lion/check-span-4pl8p to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:15 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-4pl8p job-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:15 +0000 UTC Normal Pod report-span-567r5 Binding Scheduled Successfully assigned kuttl-test-proud-lion/report-span-567r5 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:15 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-567r5 job-controller
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:16 +0000 UTC Normal Pod check-span-4pl8p AddedInterface Add eth0 [10.129.2.48/23] from ovn-kubernetes multus
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:16 +0000 UTC Normal Pod check-span-4pl8p.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:16 +0000 UTC Normal Pod report-span-567r5 AddedInterface Add eth0 [10.128.2.47/23] from ovn-kubernetes multus
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:16 +0000 UTC Normal Pod report-span-567r5.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:17 +0000 UTC Normal Pod check-span-4pl8p.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.213s (1.213s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:17 +0000 UTC Normal Pod check-span-4pl8p.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:17 +0000 UTC Normal Pod check-span-4pl8p.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:17 +0000 UTC Normal Pod report-span-567r5.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 932ms (932ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:17 +0000 UTC Normal Pod report-span-567r5.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:17 +0000 UTC Normal Pod report-span-567r5.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:56:28 | examples-with-cassandra | 2024-12-02 13:56:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:56:29 | examples-with-cassandra | Deleting namespace: kuttl-test-proud-lion
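
In the with-cassandra run above, 0-install deploys a plain Cassandra StatefulSet, 1-install creates the Jaeger CR, and the operator then runs the cassandra-schema Job seen in the events before starting the all-in-one pod. A rough sketch of such a CR, assuming the upstream example layout (the option keys map onto the Jaeger binary's --cassandra.* flags; the keyspace name here is illustrative):

kubectl apply -n "$NAMESPACE" -f - <<'EOF'
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: with-cassandra
spec:
  strategy: allInOne
  storage:
    type: cassandra            # triggers the schema Job before deployment
    options:
      cassandra:
        servers: cassandra     # the Service created from tests/cassandra.yml
        keyspace: jaeger_v1_test
EOF
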
=== CONT  kuttl/harness/examples-with-badger
logger.go:42: 13:56:42 | examples-with-badger | Creating namespace: kuttl-test-next-woodcock
logger.go:42: 13:56:42 | examples-with-badger/0-install | starting test step 0-install
logger.go:42: 13:56:42 | examples-with-badger/0-install | Jaeger:kuttl-test-next-woodcock/with-badger created
logger.go:42: 13:56:48 | examples-with-badger/0-install | test step completed 0-install
logger.go:42: 13:56:48 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 13:56:48 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null]
logger.go:42: 13:56:49 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:56:58 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:56:58 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:56:59 | examples-with-badger/1-smoke-test | job.batch/report-span created
logger.go:42: 13:56:59 | examples-with-badger/1-smoke-test | job.batch/check-span created
logger.go:42: 13:57:11 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 13:57:11 | examples-with-badger | examples-with-badger events from ns kuttl-test-next-woodcock:
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:46 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7 Binding Scheduled Successfully assigned kuttl-test-next-woodcock/with-badger-7cb75d956d-bmqw7 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:46 +0000 UTC Warning Pod with-badger-7cb75d956d-bmqw7 FailedMount MountVolume.SetUp failed for volume "with-badger-ui-oauth-proxy-tls" : secret "with-badger-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:46 +0000 UTC Warning Pod with-badger-7cb75d956d-bmqw7 FailedMount MountVolume.SetUp failed for volume "with-badger-collector-tls-config-volume" : secret "with-badger-collector-headless-tls" not found kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:46 +0000 UTC Normal ReplicaSet.apps with-badger-7cb75d956d SuccessfulCreate Created pod: with-badger-7cb75d956d-bmqw7 replicaset-controller
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:46 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-7cb75d956d to 1 deployment-controller
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:47 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7 AddedInterface Add eth0 [10.128.2.48/23] from ovn-kubernetes multus
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:47 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:47 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:47 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:47 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:48 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:48 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:54 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:54 +0000 UTC Normal Pod with-badger-7cb75d956d-bmqw7.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:54 +0000 UTC Normal ReplicaSet.apps with-badger-7cb75d956d SuccessfulDelete Deleted pod: with-badger-7cb75d956d-bmqw7 replicaset-controller
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:54 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-7cb75d956d to 0 from 1 deployment-controller
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:55 +0000 UTC Normal Pod with-badger-578f4d758b-wlwqq Binding Scheduled Successfully assigned kuttl-test-next-woodcock/with-badger-578f4d758b-wlwqq to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:55 +0000 UTC Normal ReplicaSet.apps with-badger-578f4d758b SuccessfulCreate Created pod: with-badger-578f4d758b-wlwqq replicaset-controller
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:55 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-578f4d758b to 1 deployment-controller
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:56 +0000 UTC Warning Pod with-badger-578f4d758b-wlwqq FailedMount MountVolume.SetUp failed for volume "with-badger-collector-tls-config-volume" : failed to sync secret cache: timed out waiting for the condition kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:57 +0000 UTC Normal Pod with-badger-578f4d758b-wlwqq AddedInterface Add eth0 [10.128.2.49/23] from ovn-kubernetes multus
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:57 +0000 UTC Normal Pod with-badger-578f4d758b-wlwqq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:57 +0000 UTC Normal Pod with-badger-578f4d758b-wlwqq.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:57 +0000 UTC Normal Pod with-badger-578f4d758b-wlwqq.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:57 +0000 UTC Normal Pod with-badger-578f4d758b-wlwqq.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:57 +0000 UTC Normal Pod with-badger-578f4d758b-wlwqq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:57 +0000 UTC Normal Pod with-badger-578f4d758b-wlwqq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:59 +0000 UTC Normal Pod check-span-r58qq Binding Scheduled Successfully assigned kuttl-test-next-woodcock/check-span-r58qq to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:59 +0000 UTC Normal Pod check-span-r58qq AddedInterface Add eth0 [10.129.2.49/23] from ovn-kubernetes multus
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:59 +0000 UTC Normal Pod check-span-r58qq.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:59 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-r58qq job-controller
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:59 +0000 UTC Normal Pod report-span-645jm Binding Scheduled Successfully assigned kuttl-test-next-woodcock/report-span-645jm to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:59 +0000 UTC Normal Pod report-span-645jm AddedInterface Add eth0 [10.131.0.65/23] from ovn-kubernetes multus
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:59 +0000 UTC Normal Pod report-span-645jm.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:56:59 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-645jm job-controller
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:57:00 +0000 UTC Normal Pod check-span-r58qq.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 675ms (675ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:57:00 +0000 UTC Normal Pod check-span-r58qq.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:57:00 +0000 UTC Normal Pod check-span-r58qq.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:57:00 +0000 UTC Normal Pod report-span-645jm.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 624ms (624ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:57:00 +0000 UTC Normal Pod report-span-645jm.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:57:00 +0000 UTC Normal Pod report-span-645jm.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:57:11 | examples-with-badger | 2024-12-02 13:57:10 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:57:11 | examples-with-badger | Deleting namespace: kuttl-test-next-woodcock
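
The FailedMount warnings above are typically transient on OpenShift: the serving-cert secrets are generated by the service CA shortly after the pod is created, so the kubelet's mount retries succeed a moment later. Separately, get-token.sh is repo tooling; a rough stand-in with stock kubectl (v1.24+ for `kubectl create token`), assuming the e2e-test ServiceAccount exists and the request is made from inside the cluster, since the query Service sits behind the oauth-proxy sidecar:

TOKEN=$(kubectl create token e2e-test -n "$NAMESPACE")
# the proxy only forwards requests that carry a valid bearer token
curl -sk -H "Authorization: Bearer $TOKEN" \
  "https://with-badger-query:443/api/services"
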
=== CONT  kuttl/harness/examples-simplest
logger.go:42: 13:57:24 | examples-simplest | Creating namespace: kuttl-test-known-terrier
logger.go:42: 13:57:24 | examples-simplest/0-install | starting test step 0-install
logger.go:42: 13:57:24 | examples-simplest/0-install | Jaeger:kuttl-test-known-terrier/simplest created
logger.go:42: 13:57:30 | examples-simplest/0-install | test step completed 0-install
logger.go:42: 13:57:30 | examples-simplest/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 13:57:30 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 13:57:31 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:57:39 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:57:39 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:57:40 | examples-simplest/1-smoke-test | job.batch/report-span created
logger.go:42: 13:57:40 | examples-simplest/1-smoke-test | job.batch/check-span created
logger.go:42: 13:57:53 | examples-simplest/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 13:57:53 | examples-simplest | examples-simplest events from ns kuttl-test-known-terrier:
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:28 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh Binding Scheduled Successfully assigned kuttl-test-known-terrier/simplest-569f9b7b8c-c7gzh to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:28 +0000 UTC Normal ReplicaSet.apps simplest-569f9b7b8c SuccessfulCreate Created pod: simplest-569f9b7b8c-c7gzh replicaset-controller
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:28 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-569f9b7b8c to 1 deployment-controller
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:29 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh AddedInterface Add eth0 [10.128.2.50/23] from ovn-kubernetes multus
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:29 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:29 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:29 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:29 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:29 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:29 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:36 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:36 +0000 UTC Normal Pod simplest-569f9b7b8c-c7gzh.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:36 +0000 UTC Normal ReplicaSet.apps simplest-569f9b7b8c SuccessfulDelete Deleted pod: simplest-569f9b7b8c-c7gzh replicaset-controller
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:36 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-569f9b7b8c to 0 from 1 deployment-controller
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:37 +0000 UTC Normal Pod simplest-558974574f-lvtpz Binding Scheduled Successfully assigned kuttl-test-known-terrier/simplest-558974574f-lvtpz to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:37 +0000 UTC Normal ReplicaSet.apps simplest-558974574f SuccessfulCreate Created pod: simplest-558974574f-lvtpz replicaset-controller
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:37 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-558974574f to 1 deployment-controller
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:38 +0000 UTC Normal Pod simplest-558974574f-lvtpz AddedInterface Add eth0 [10.128.2.51/23] from ovn-kubernetes multus
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:38 +0000 UTC Normal Pod simplest-558974574f-lvtpz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:38 +0000 UTC Normal Pod simplest-558974574f-lvtpz.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:38 +0000 UTC Normal Pod simplest-558974574f-lvtpz.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:38 +0000 UTC Normal Pod simplest-558974574f-lvtpz.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:38 +0000 UTC Normal Pod simplest-558974574f-lvtpz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:38 +0000 UTC Normal Pod simplest-558974574f-lvtpz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:40 +0000 UTC Normal Pod check-span-fjphz Binding Scheduled Successfully assigned kuttl-test-known-terrier/check-span-fjphz to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:40 +0000 UTC Normal Pod check-span-fjphz AddedInterface Add eth0 [10.129.2.50/23] from ovn-kubernetes multus
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:40 +0000 UTC Normal Pod check-span-fjphz.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:40 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-fjphz job-controller
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:40 +0000 UTC Normal Pod report-span-2q45q Binding Scheduled Successfully assigned kuttl-test-known-terrier/report-span-2q45q to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:40 +0000 UTC Normal Pod report-span-2q45q AddedInterface Add eth0 [10.131.0.66/23] from ovn-kubernetes multus
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:40 +0000 UTC Normal Pod report-span-2q45q.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:40 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-2q45q job-controller
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:41 +0000 UTC Normal Pod report-span-2q45q.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 706ms (706ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:41 +0000 UTC Normal Pod report-span-2q45q.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:41 +0000 UTC Normal Pod report-span-2q45q.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:42 +0000 UTC Normal Pod check-span-fjphz.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.086s (1.086s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:42 +0000 UTC Normal Pod check-span-fjphz.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:42 +0000 UTC Normal Pod check-span-fjphz.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 13:57:53 | examples-simplest | 2024-12-02 13:57:53 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 13:57:53 | examples-simplest | Deleting namespace: kuttl-test-known-terrier
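
examples-simplest exercises the smallest possible Jaeger CR: a name and nothing else. A sketch, assuming the stock upstream example; on this cluster the operator defaults to the allInOne strategy with in-memory storage and injects the oauth-proxy sidecar visible in the events above:

kubectl apply -n "$NAMESPACE" -f - <<'EOF'
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: simplest
EOF
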
=== CONT  kuttl/harness/examples-simple-prod-with-volumes
logger.go:42: 13:58:06 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 13:58:06 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-select-locust
logger.go:42: 13:58:06 | examples-simple-prod-with-volumes/1-install | starting test step 1-install
logger.go:42: 13:58:06 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-select-locust/simple-prod created
logger.go:42: 13:58:55 | examples-simple-prod-with-volumes/1-install | test step completed 1-install
logger.go:42: 13:58:55 | examples-simple-prod-with-volumes/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 13:58:55 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simple-prod /dev/null]
logger.go:42: 13:58:57 | examples-simple-prod-with-volumes/2-smoke-test | Warning: resource jaegers/simple-prod is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:59:04 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:59:05 | examples-simple-prod-with-volumes/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:59:05 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/report-span created
logger.go:42: 13:59:05 | examples-simple-prod-with-volumes/2-smoke-test | job.batch/check-span created
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes/3-check-volume | starting test step 3-check-volume
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes/3-check-volume | running command: [sh -c kubectl exec $(kubectl get pods -n $NAMESPACE -l app=jaeger -l app.kubernetes.io/component=collector -o yaml | /tmp/jaeger-tests/bin/yq e '.items[0].metadata.name') -n $NAMESPACE -- ls /usr/share/elasticsearch/data]
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes/3-check-volume | test step completed 3-check-volume
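
The 3-check-volume step asserts that the collector pod can list the Elasticsearch data directory mounted into it. The same check without the repo's bundled yq, assuming the label pair used above still selects exactly the collector pods; jsonpath extracts the first matching pod name directly:

POD=$(kubectl get pods -n "$NAMESPACE" \
  -l app=jaeger,app.kubernetes.io/component=collector \
  -o jsonpath='{.items[0].metadata.name}')
kubectl exec "$POD" -n "$NAMESPACE" -- ls /usr/share/elasticsearch/data
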
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-select-locust:
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:25 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66d987 SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh replicaset-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:25 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh Binding Scheduled Successfully assigned kuttl-test-select-locust/elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:25 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestselectlocustsimpleprod-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66d987 to 1 deployment-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh AddedInterface Add eth0 [10.128.2.52/23] from ovn-kubernetes multus
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:26 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:41 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestselectlocustsimpleprod-1-6c6c66t55dh.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:52 +0000 UTC Normal Pod simple-prod-collector-698ff7f4f-92d96 Binding Scheduled Successfully assigned kuttl-test-select-locust/simple-prod-collector-698ff7f4f-92d96 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:52 +0000 UTC Warning Pod simple-prod-collector-698ff7f4f-92d96 FailedMount MountVolume.SetUp failed for volume "simple-prod-collector-tls-config-volume" : secret "simple-prod-collector-headless-tls" not found kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:52 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-698ff7f4f SuccessfulCreate Created pod: simple-prod-collector-698ff7f4f-92d96 replicaset-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:52 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-698ff7f4f to 1 deployment-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:52 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv Binding Scheduled Successfully assigned kuttl-test-select-locust/simple-prod-query-7795b578dd-s2qnv to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:52 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7795b578dd SuccessfulCreate Created pod: simple-prod-query-7795b578dd-s2qnv replicaset-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:52 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-7795b578dd to 1 deployment-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-collector-698ff7f4f-92d96 AddedInterface Add eth0 [10.131.0.67/23] from ovn-kubernetes multus
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-collector-698ff7f4f-92d96.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-collector-698ff7f4f-92d96.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-collector-698ff7f4f-92d96.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv AddedInterface Add eth0 [10.129.2.51/23] from ovn-kubernetes multus
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:58:53 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:00 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:00 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:00 +0000 UTC Normal Pod simple-prod-query-7795b578dd-s2qnv.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:00 +0000 UTC Normal ReplicaSet.apps simple-prod-query-7795b578dd SuccessfulDelete Deleted pod: simple-prod-query-7795b578dd-s2qnv replicaset-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:00 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled down replica set simple-prod-query-7795b578dd to 0 from 1 deployment-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:01 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2 Binding Scheduled Successfully assigned kuttl-test-select-locust/simple-prod-query-86d58cdd7f-fnvg2 to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:01 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2 AddedInterface Add eth0 [10.129.2.52/23] from ovn-kubernetes multus
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:01 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:01 +0000 UTC Normal ReplicaSet.apps simple-prod-query-86d58cdd7f SuccessfulCreate Created pod: simple-prod-query-86d58cdd7f-fnvg2 replicaset-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:01 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-86d58cdd7f to 1 deployment-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:02 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:02 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:02 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:02 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:02 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:02 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:02 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:02 +0000 UTC Normal Pod simple-prod-query-86d58cdd7f-fnvg2.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:05 +0000 UTC Normal Pod check-span-jdf82 Binding Scheduled Successfully assigned kuttl-test-select-locust/check-span-jdf82 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:05 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-jdf82 job-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:05 +0000 UTC Normal Pod report-span-k7m2b Binding Scheduled Successfully assigned kuttl-test-select-locust/report-span-k7m2b to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:05 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-k7m2b job-controller
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:06 +0000 UTC Normal Pod check-span-jdf82 AddedInterface Add eth0 [10.131.0.69/23] from ovn-kubernetes multus
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:06 +0000 UTC Normal Pod check-span-jdf82.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:06 +0000 UTC Normal Pod report-span-k7m2b AddedInterface Add eth0 [10.131.0.68/23] from ovn-kubernetes multus
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:06 +0000 UTC Normal Pod report-span-k7m2b.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:06 +0000 UTC Normal Pod report-span-k7m2b.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 693ms (693ms including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:07 +0000 UTC Normal Pod report-span-k7m2b.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:07 +0000 UTC Normal Pod report-span-k7m2b.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:07 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:12 +0000 UTC Normal Pod check-span-jdf82.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 5.767s (5.767s including waiting). Image size: 60976023 bytes.
kubelet logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:12 +0000 UTC Normal Pod check-span-jdf82.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:12 +0000 UTC Normal Pod check-span-jdf82.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | 2024-12-02 13:59:15 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 13:59:16 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-select-locust === CONT kuttl/harness/examples-business-application-injected-sidecar logger.go:42: 13:59:30 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-informed-aardvark logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/0-install | starting test step 0-install logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-informed-aardvark/myapp created logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/0-install | test step completed 0-install logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/1-install | starting test step 1-install logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-informed-aardvark/simplest created logger.go:42: 13:59:45 | examples-business-application-injected-sidecar/1-install | test step completed 1-install logger.go:42: 13:59:45 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test logger.go:42: 13:59:45 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null] logger.go:42: 13:59:47 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
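A note on the HorizontalPodAutoscaler warnings above: the operator creates an HPA for the collector, and FailedGetResourceMetric/FailedComputeMetricsReplicas fire while the cluster's resource metrics API has no samples yet for the freshly started pods; they normally clear once metrics are being reported. A quick manual check is sketched below; the namespace is the one from this run, but the label selector is an assumption, not something taken from the log:

# Confirm the resource metrics API answers at all, then ask for collector pod metrics
kubectl get --raw /apis/metrics.k8s.io/v1beta1 | head -c 200
kubectl top pods -n kuttl-test-select-locust -l app.kubernetes.io/component=collector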
=== CONT  kuttl/harness/examples-business-application-injected-sidecar
logger.go:42: 13:59:30 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-informed-aardvark
logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/0-install | starting test step 0-install
logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-informed-aardvark/myapp created
logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/0-install | test step completed 0-install
logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/1-install | starting test step 1-install
logger.go:42: 13:59:30 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-informed-aardvark/simplest created
logger.go:42: 13:59:45 | examples-business-application-injected-sidecar/1-install | test step completed 1-install
logger.go:42: 13:59:45 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 13:59:45 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 13:59:47 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 13:59:54 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 13:59:54 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 13:59:54 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created
logger.go:42: 13:59:54 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-informed-aardvark:
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:30 +0000 UTC Normal Pod myapp-fc54f78d8-vzkdf Binding Scheduled Successfully assigned kuttl-test-informed-aardvark/myapp-fc54f78d8-vzkdf to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:30 +0000 UTC Normal ReplicaSet.apps myapp-fc54f78d8 SuccessfulCreate Created pod: myapp-fc54f78d8-vzkdf replicaset-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:30 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-fc54f78d8 to 1 deployment-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:31 +0000 UTC Normal Pod myapp-fc54f78d8-vzkdf AddedInterface Add eth0 [10.128.2.53/23] from ovn-kubernetes multus
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:31 +0000 UTC Normal Pod myapp-fc54f78d8-vzkdf.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:34 +0000 UTC Normal Pod myapp-c4d4bf594-6djvc Binding Scheduled Successfully assigned kuttl-test-informed-aardvark/myapp-c4d4bf594-6djvc to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:34 +0000 UTC Warning Pod myapp-c4d4bf594-6djvc FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:34 +0000 UTC Warning Pod myapp-c4d4bf594-6djvc FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:34 +0000 UTC Normal ReplicaSet.apps myapp-c4d4bf594 SuccessfulCreate Created pod: myapp-c4d4bf594-6djvc replicaset-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:34 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-c4d4bf594 to 1 deployment-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:36 +0000 UTC Normal Pod myapp-fc54f78d8-vzkdf.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 5.815s (5.815s including waiting). Image size: 282912835 bytes. kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:36 +0000 UTC Normal Pod myapp-fc54f78d8-vzkdf.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:37 +0000 UTC Normal Pod myapp-fc54f78d8-vzkdf.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:41 +0000 UTC Warning Pod myapp-fc54f78d8-vzkdf.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.128.2.53:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:42 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr Binding Scheduled Successfully assigned kuttl-test-informed-aardvark/simplest-5896d48cb5-6q5mr to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:42 +0000 UTC Warning Pod simplest-5896d48cb5-6q5mr FailedMount MountVolume.SetUp failed for volume "simplest-collector-tls-config-volume" : secret "simplest-collector-headless-tls" not found kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:42 +0000 UTC Normal ReplicaSet.apps simplest-5896d48cb5 SuccessfulCreate Created pod: simplest-5896d48cb5-6q5mr replicaset-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:42 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-5896d48cb5 to 1 deployment-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:43 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr AddedInterface Add eth0 [10.128.2.54/23] from ovn-kubernetes multus
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:43 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:43 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:43 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:43 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:43 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:43 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:50 +0000 UTC Normal Pod myapp-c4d4bf594-6djvc AddedInterface Add eth0 [10.131.0.70/23] from ovn-kubernetes multus
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:50 +0000 UTC Normal Pod myapp-c4d4bf594-6djvc.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:50 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:50 +0000 UTC Normal Pod simplest-5896d48cb5-6q5mr.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:50 +0000 UTC Normal ReplicaSet.apps simplest-5896d48cb5 SuccessfulDelete Deleted pod: simplest-5896d48cb5-6q5mr replicaset-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:50 +0000 UTC Normal Pod simplest-7b8f4b7f5c-ckng6 Binding Scheduled Successfully assigned kuttl-test-informed-aardvark/simplest-7b8f4b7f5c-ckng6 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:50 +0000 UTC Normal ReplicaSet.apps simplest-7b8f4b7f5c SuccessfulCreate Created pod: simplest-7b8f4b7f5c-ckng6 replicaset-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:50 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-5896d48cb5 to 0 from 1 deployment-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:50 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-7b8f4b7f5c to 1 deployment-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:51 +0000 UTC Normal Pod simplest-7b8f4b7f5c-ckng6 AddedInterface Add eth0 [10.128.2.55/23] from ovn-kubernetes multus
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:51 +0000 UTC Normal Pod simplest-7b8f4b7f5c-ckng6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:51 +0000 UTC Normal Pod simplest-7b8f4b7f5c-ckng6.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:51 +0000 UTC Normal Pod simplest-7b8f4b7f5c-ckng6.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:51 +0000 UTC Normal Pod simplest-7b8f4b7f5c-ckng6.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:51 +0000 UTC Normal Pod simplest-7b8f4b7f5c-ckng6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:51 +0000 UTC Normal Pod simplest-7b8f4b7f5c-ckng6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:53 +0000 UTC Normal Pod myapp-c4d4bf594-6djvc.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.2s (3.2s including waiting). Image size: 282912835 bytes. kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:53 +0000 UTC Normal Pod myapp-c4d4bf594-6djvc.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:53 +0000 UTC Normal Pod myapp-c4d4bf594-6djvc.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:53 +0000 UTC Normal Pod myapp-c4d4bf594-6djvc.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:54 +0000 UTC Normal Pod check-span-9pdfh Binding Scheduled Successfully assigned kuttl-test-informed-aardvark/check-span-9pdfh to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:54 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-9pdfh job-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:54 +0000 UTC Normal Pod myapp-c4d4bf594-6djvc.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:54 +0000 UTC Normal Pod myapp-c4d4bf594-6djvc.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:54 +0000 UTC Normal Pod report-span-m5wwt Binding Scheduled Successfully assigned kuttl-test-informed-aardvark/report-span-m5wwt to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:54 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-m5wwt job-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:55 +0000 UTC Normal Pod check-span-9pdfh AddedInterface Add eth0 [10.129.2.54/23] from ovn-kubernetes multus
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:55 +0000 UTC Normal Pod check-span-9pdfh.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:55 +0000 UTC Normal Pod myapp-fc54f78d8-vzkdf.spec.containers{myapp} Killing Stopping container myapp kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:55 +0000 UTC Normal ReplicaSet.apps myapp-fc54f78d8 SuccessfulDelete Deleted pod: myapp-fc54f78d8-vzkdf replicaset-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:55 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-fc54f78d8 to 0 from 1 deployment-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:55 +0000 UTC Normal Pod report-span-m5wwt AddedInterface Add eth0 [10.129.2.53/23] from ovn-kubernetes multus
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:55 +0000 UTC Normal Pod report-span-m5wwt.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:56 +0000 UTC Normal Pod check-span-9pdfh.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.047s (1.047s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:56 +0000 UTC Normal Pod check-span-9pdfh.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:56 +0000 UTC Normal Pod check-span-9pdfh.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:57 +0000 UTC Normal Pod report-span-m5wwt.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.771s (1.771s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:57 +0000 UTC Normal Pod report-span-m5wwt.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 13:59:57 +0000 UTC Normal Pod report-span-m5wwt.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 14:00:01 +0000 UTC Warning Pod myapp-c4d4bf594-6djvc.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.131.0.70:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | 2024-12-02 14:00:07 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 14:00:08 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-informed-aardvark
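What this test exercised: myapp is deployed first (step 0), a Jaeger instance named simplest is created (step 1), and the operator then rolls myapp out again with a jaeger-agent sidecar, which is why a second ReplicaSet (myapp-c4d4bf594) appears above with mounts for simplest-service-ca/simplest-trusted-ca before those configmaps exist. Injection is requested with the operator's sidecar annotation; a minimal sketch follows (the image name is taken from the log, the selector/labels are illustrative):

cat <<'EOF' | kubectl apply -n "$NAMESPACE" -f -
apiVersion: apps/v1
kind: Deployment
metadata:
  name: myapp
  annotations:
    sidecar.jaegertracing.io/inject: "true"   # watched by the jaeger-operator
spec:
  selector:
    matchLabels:
      app: myapp
  template:
    metadata:
      labels:
        app: myapp
    spec:
      containers:
      - name: myapp
        image: jaegertracing/vertx-create-span:operator-e2e-tests
EOF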
=== CONT  kuttl/harness/examples-service-types
logger.go:42: 14:00:21 | examples-service-types | Creating namespace: kuttl-test-sacred-flamingo
logger.go:42: 14:00:21 | examples-service-types/0-install | starting test step 0-install
logger.go:42: 14:00:21 | examples-service-types/0-install | Jaeger:kuttl-test-sacred-flamingo/service-types created
logger.go:42: 14:00:30 | examples-service-types/0-install | test step completed 0-install
logger.go:42: 14:00:30 | examples-service-types/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 14:00:30 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null]
logger.go:42: 14:00:32 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 14:00:39 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 14:00:39 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 14:00:40 | examples-service-types/1-smoke-test | job.batch/report-span created
logger.go:42: 14:00:40 | examples-service-types/1-smoke-test | job.batch/check-span created
logger.go:42: 14:00:49 | examples-service-types/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 14:00:49 | examples-service-types/2- | starting test step 2-
logger.go:42: 14:00:49 | examples-service-types/2- | test step completed 2-
logger.go:42: 14:00:49 | examples-service-types | examples-service-types events from ns kuttl-test-sacred-flamingo:
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:25 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:26 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2 Binding Scheduled Successfully assigned kuttl-test-sacred-flamingo/service-types-5bdc4647d5-rxtd2 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:26 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2 AddedInterface Add eth0 [10.128.2.57/23] from ovn-kubernetes multus
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:26 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:26 +0000 UTC Normal ReplicaSet.apps service-types-5bdc4647d5 SuccessfulCreate Created pod: service-types-5bdc4647d5-rxtd2 replicaset-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:26 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-5bdc4647d5 to 1 deployment-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:27 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:27 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:27 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:27 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:27 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:29 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:29 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:32 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:33 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:33 +0000 UTC Normal Pod service-types-5bdc4647d5-rxtd2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:33 +0000 UTC Normal ReplicaSet.apps service-types-5bdc4647d5 SuccessfulDelete Deleted pod: service-types-5bdc4647d5-rxtd2 replicaset-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:33 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-5bdc4647d5 to 0 from 1 deployment-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:35 +0000 UTC Normal Pod service-types-846dcb65d8-96btz Binding Scheduled Successfully assigned kuttl-test-sacred-flamingo/service-types-846dcb65d8-96btz to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:35 +0000 UTC Normal ReplicaSet.apps service-types-846dcb65d8 SuccessfulCreate Created pod: service-types-846dcb65d8-96btz replicaset-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:35 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-846dcb65d8 to 1 deployment-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:36 +0000 UTC Normal Pod service-types-846dcb65d8-96btz AddedInterface Add eth0 [10.128.2.58/23] from ovn-kubernetes multus
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:36 +0000 UTC Normal Pod service-types-846dcb65d8-96btz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:36 +0000 UTC Normal Pod service-types-846dcb65d8-96btz.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:36 +0000 UTC Normal Pod service-types-846dcb65d8-96btz.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:36 +0000 UTC Normal Pod service-types-846dcb65d8-96btz.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:36 +0000 UTC Normal Pod service-types-846dcb65d8-96btz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:36 +0000 UTC Normal Pod service-types-846dcb65d8-96btz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:40 +0000 UTC Normal Pod check-span-cdcc4 Binding Scheduled Successfully assigned kuttl-test-sacred-flamingo/check-span-cdcc4 to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:40 +0000 UTC Normal Pod check-span-cdcc4 AddedInterface Add eth0 [10.129.2.55/23] from ovn-kubernetes multus
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:40 +0000 UTC Normal Pod check-span-cdcc4.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:40 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-cdcc4 job-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:40 +0000 UTC Normal Pod report-span-thm85 Binding Scheduled Successfully assigned kuttl-test-sacred-flamingo/report-span-thm85 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:40 +0000 UTC Normal Pod report-span-thm85 AddedInterface Add eth0 [10.131.0.71/23] from ovn-kubernetes multus
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:40 +0000 UTC Normal Pod report-span-thm85.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:40 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-thm85 job-controller
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:43 +0000 UTC Normal Pod report-span-thm85.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.226s (2.226s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:43 +0000 UTC Normal Pod report-span-thm85.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:43 +0000 UTC Normal Pod report-span-thm85.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:45 +0000 UTC Normal Pod check-span-cdcc4.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 4.492s (4.492s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:45 +0000 UTC Normal Pod check-span-cdcc4.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:45 +0000 UTC Normal Pod check-span-cdcc4.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 14:00:49 | examples-service-types | 2024-12-02 14:00:48 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 14:00:49 | examples-service-types | Deleting namespace: kuttl-test-sacred-flamingo
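The EnsuringLoadBalancer/EnsuredLoadBalancer pairs above are the cloud service-controller provisioning LoadBalancer-type Services for the collector and query, which is the behavior this test asserts. A quick way to inspect the resulting Service types by hand while the test namespace still exists (a sketch using the namespace from this run):

kubectl get svc -n kuttl-test-sacred-flamingo \
  -o custom-columns=NAME:.metadata.name,TYPE:.spec.type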
=== CONT  kuttl/harness/examples-openshift-with-htpasswd
logger.go:42: 14:01:03 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 14:01:03 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 14:01:03 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-exciting-goblin
logger.go:42: 14:01:03 | examples-openshift-with-htpasswd/0-install | starting test step 0-install
logger.go:42: 14:01:03 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-exciting-goblin/htpasswd created
logger.go:42: 14:01:03 | examples-openshift-with-htpasswd/0-install | test step completed 0-install
logger.go:42: 14:01:03 | examples-openshift-with-htpasswd/1-install | starting test step 1-install
logger.go:42: 14:01:03 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-exciting-goblin/with-htpasswd created
logger.go:42: 14:01:10 | examples-openshift-with-htpasswd/1-install | test step completed 1-install
logger.go:42: 14:01:10 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured
logger.go:42: 14:01:10 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh]
logger.go:42: 14:01:10 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated
logger.go:42: 14:01:10 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0
logger.go:42: 14:01:10 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com
logger.go:42: 14:01:10 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 14:01:11 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response
logger.go:42: 14:01:11 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift
logger.go:42: 14:01:11 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret
logger.go:42: 14:01:11 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:01:11 | examples-openshift-with-htpasswd/2-check-unsecured | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 14:01:11 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:01:11 | examples-openshift-with-htpasswd/2-check-unsecured | HTTP response is 503. 403 expected. Waiting 10 s
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 3/30 the https://with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 14:01:21 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 2/30 the https://with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd]
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | Try number 2/30 the https://with-htpasswd-kuttl-test-exciting-goblin.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-exciting-goblin:
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:07 +0000 UTC Normal Pod with-htpasswd-cc66cc7c7-vmkrj Binding Scheduled Successfully assigned kuttl-test-exciting-goblin/with-htpasswd-cc66cc7c7-vmkrj to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:07 +0000 UTC Warning Pod with-htpasswd-cc66cc7c7-vmkrj FailedMount MountVolume.SetUp failed for volume "with-htpasswd-ui-oauth-proxy-tls" : secret "with-htpasswd-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:07 +0000 UTC Normal ReplicaSet.apps with-htpasswd-cc66cc7c7 SuccessfulCreate Created pod: with-htpasswd-cc66cc7c7-vmkrj replicaset-controller
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:07 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-cc66cc7c7 to 1 deployment-controller
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:08 +0000 UTC Normal Pod with-htpasswd-cc66cc7c7-vmkrj AddedInterface Add eth0 [10.128.2.59/23] from ovn-kubernetes multus
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:08 +0000 UTC Normal Pod with-htpasswd-cc66cc7c7-vmkrj.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:08 +0000 UTC Normal Pod with-htpasswd-cc66cc7c7-vmkrj.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:08 +0000 UTC Normal Pod with-htpasswd-cc66cc7c7-vmkrj.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:08 +0000 UTC Normal Pod with-htpasswd-cc66cc7c7-vmkrj.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:09 +0000 UTC Normal Pod with-htpasswd-cc66cc7c7-vmkrj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | 2024-12-02 14:01:09 +0000 UTC Normal Pod with-htpasswd-cc66cc7c7-vmkrj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:01:22 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-exciting-goblin
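The three checks above walk the oauth-proxy htpasswd flow: no credentials gives 403, wrong credentials give 403, and awesomeuser/awesomepassword gives 200. A sketch of producing the htpasswd secret installed in step 0 and replaying the authorized check by hand; the credentials and the route lookup are taken from the log, while the secret name/key are assumptions, and htpasswd is the Apache utility:

htpasswd -cbB ./htpasswd awesomeuser awesomepassword        # -c create file, -b password on cmdline, -B bcrypt
kubectl create secret generic htpasswd -n "$NAMESPACE" --from-file=htpasswd=./htpasswd
curl -k -s -o /dev/null -w '%{http_code}\n' \
  -u awesomeuser:awesomepassword \
  "https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE")/search"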
=== CONT  kuttl/harness/examples-all-in-one-with-options
logger.go:42: 14:01:29 | examples-all-in-one-with-options | Creating namespace: kuttl-test-tidy-duckling
logger.go:42: 14:01:29 | examples-all-in-one-with-options/0-install | starting test step 0-install
logger.go:42: 14:01:29 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-tidy-duckling/my-jaeger created
logger.go:42: 14:01:36 | examples-all-in-one-with-options/0-install | test step completed 0-install
logger.go:42: 14:01:36 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 14:01:36 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 14:01:38 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 14:01:46 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 14:01:46 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 14:01:47 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created
logger.go:42: 14:01:47 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created
logger.go:42: 14:01:56 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 14:01:56 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-tidy-duckling:
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:33 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2 Binding Scheduled Successfully assigned kuttl-test-tidy-duckling/my-jaeger-647b85c945-4nds2 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:33 +0000 UTC Warning Pod my-jaeger-647b85c945-4nds2 FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : secret "my-jaeger-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:33 +0000 UTC Normal ReplicaSet.apps my-jaeger-647b85c945 SuccessfulCreate Created pod: my-jaeger-647b85c945-4nds2 replicaset-controller
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:33 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-647b85c945 to 1 deployment-controller
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:34 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2 AddedInterface Add eth0 [10.128.2.60/23] from ovn-kubernetes multus
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:34 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:34 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:35 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:35 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:35 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:35 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:43 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:43 +0000 UTC Normal Pod my-jaeger-647b85c945-4nds2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-647b85c945 SuccessfulDelete Deleted pod: my-jaeger-647b85c945-4nds2 replicaset-controller
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:43 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-647b85c945 to 0 from 1 deployment-controller
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:44 +0000 UTC Normal Pod my-jaeger-695c59b96-q69gz Binding Scheduled Successfully assigned kuttl-test-tidy-duckling/my-jaeger-695c59b96-q69gz to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:44 +0000 UTC Normal Pod my-jaeger-695c59b96-q69gz AddedInterface Add eth0 [10.128.2.61/23] from ovn-kubernetes multus
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:44 +0000 UTC Normal ReplicaSet.apps my-jaeger-695c59b96 SuccessfulCreate Created pod: my-jaeger-695c59b96-q69gz replicaset-controller
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:44 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-695c59b96 to 1 deployment-controller
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:45 +0000 UTC Normal Pod my-jaeger-695c59b96-q69gz.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:45 +0000 UTC Normal Pod my-jaeger-695c59b96-q69gz.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:45 +0000 UTC Normal Pod my-jaeger-695c59b96-q69gz.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:45 +0000 UTC Normal Pod my-jaeger-695c59b96-q69gz.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:45 +0000 UTC Normal Pod my-jaeger-695c59b96-q69gz.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:45 +0000 UTC Normal Pod my-jaeger-695c59b96-q69gz.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:47 +0000 UTC Normal Pod check-span-4b779 Binding Scheduled Successfully assigned kuttl-test-tidy-duckling/check-span-4b779 to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:47 +0000 UTC Normal Pod check-span-4b779 AddedInterface Add eth0 [10.129.2.56/23] from ovn-kubernetes multus
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:47 +0000 UTC Normal Pod check-span-4b779.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:47 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-4b779 job-controller
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:47 +0000 UTC Normal Pod report-span-gv467 Binding Scheduled Successfully assigned kuttl-test-tidy-duckling/report-span-gv467 to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:47 +0000 UTC Normal Pod report-span-gv467 AddedInterface Add eth0 [10.131.0.72/23] from ovn-kubernetes multus
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:47 +0000 UTC Normal Pod report-span-gv467.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:47 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-gv467 job-controller
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:50 +0000 UTC Normal Pod report-span-gv467.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 3.062s (3.062s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:50 +0000 UTC Normal Pod report-span-gv467.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:50 +0000 UTC Normal Pod report-span-gv467.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:52 +0000 UTC Normal Pod check-span-4b779.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 4.821s (4.821s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:52 +0000 UTC Normal Pod check-span-4b779.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:52 +0000 UTC Normal Pod check-span-4b779.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 14:01:56 | examples-all-in-one-with-options | 2024-12-02 14:01:55 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 14:01:56 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-tidy-duckling
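The smoke test above pointed JAEGER_QUERY_ENDPOINT at https://my-jaeger-query:443/jaeger, i.e. this instance serves the query UI under a non-default base path. A minimal sketch of a CR with that option, modeled on the upstream all-in-one-with-options example (an assumption; the exact CR used in this run is not shown in the log):

cat <<'EOF' | kubectl apply -n "$NAMESPACE" -f -
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: my-jaeger
spec:
  strategy: allInOne
  allInOne:
    options:
      query:
        base-path: /jaeger   # moves the UI and API under /jaeger
EOF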
examples-agent-with-priority-class events from ns kuttl-test-chief-weasel: logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:19 +0000 UTC Normal Pod agent-as-daemonset-c45f6bb84-xr46d Binding Scheduled Successfully assigned kuttl-test-chief-weasel/agent-as-daemonset-c45f6bb84-xr46d to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:19 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-c45f6bb84 SuccessfulCreate Created pod: agent-as-daemonset-c45f6bb84-xr46d replicaset-controller logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:19 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-c45f6bb84 to 1 deployment-controller logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:20 +0000 UTC Warning Pod agent-as-daemonset-c45f6bb84-xr46d FailedMount MountVolume.SetUp failed for volume "agent-as-daemonset-ui-oauth-proxy-tls" : secret "agent-as-daemonset-ui-oauth-proxy-tls" not found kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:21 +0000 UTC Normal Pod agent-as-daemonset-c45f6bb84-xr46d AddedInterface Add eth0 [10.128.2.62/23] from ovn-kubernetes multus logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:21 +0000 UTC Normal Pod agent-as-daemonset-c45f6bb84-xr46d.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:21 +0000 UTC Normal Pod agent-as-daemonset-c45f6bb84-xr46d.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:21 +0000 UTC Normal Pod agent-as-daemonset-c45f6bb84-xr46d.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:21 +0000 UTC Normal Pod agent-as-daemonset-c45f6bb84-xr46d.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:21 +0000 UTC Normal Pod agent-as-daemonset-c45f6bb84-xr46d.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:21 +0000 UTC Normal Pod agent-as-daemonset-c45f6bb84-xr46d.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:23 +0000 UTC Normal Pod check-span-kwh5s Binding Scheduled Successfully assigned kuttl-test-chief-weasel/check-span-kwh5s to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:23 +0000 UTC Normal Pod check-span-kwh5s AddedInterface Add eth0 [10.129.2.57/23] from ovn-kubernetes multus logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:23 +0000 UTC Normal Pod check-span-kwh5s.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 14:02:36 | 
examples-agent-with-priority-class | 2024-12-02 14:02:23 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-kwh5s job-controller logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:23 +0000 UTC Normal Pod report-span-k4n25 Binding Scheduled Successfully assigned kuttl-test-chief-weasel/report-span-k4n25 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:23 +0000 UTC Normal Pod report-span-k4n25 AddedInterface Add eth0 [10.131.0.73/23] from ovn-kubernetes multus logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:23 +0000 UTC Normal Pod report-span-k4n25.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:23 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-k4n25 job-controller logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:24 +0000 UTC Normal Pod check-span-kwh5s.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 800ms (800ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:24 +0000 UTC Normal Pod check-span-kwh5s.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:24 +0000 UTC Normal Pod check-span-kwh5s.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:25 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "elasticsearch-scc": Forbidden: not usable by user or serviceaccount, provider "machine-api-termination-handler": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or 
serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:25 +0000 UTC Normal Pod report-span-k4n25.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.012s (2.012s including waiting). Image size: 60976023 bytes. kubelet
logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:25 +0000 UTC Normal Pod report-span-k4n25.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:26 +0000 UTC Normal Pod report-span-k4n25.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 14:02:36 | examples-agent-with-priority-class | 2024-12-02 14:02:35 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 14:02:36 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-chief-weasel
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (714.42s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (6.36s)
--- PASS: kuttl/harness/examples-simple-prod (87.35s)
--- PASS: kuttl/harness/examples-with-sampling (195.04s)
--- PASS: kuttl/harness/examples-with-cassandra (57.16s)
--- PASS: kuttl/harness/examples-with-badger (42.22s)
--- PASS: kuttl/harness/examples-simplest (42.28s)
--- PASS: kuttl/harness/examples-simple-prod-with-volumes (83.58s)
--- PASS: kuttl/harness/examples-business-application-injected-sidecar (51.55s)
--- PASS: kuttl/harness/examples-service-types (41.67s)
--- PASS: kuttl/harness/examples-openshift-with-htpasswd (26.09s)
--- PASS: kuttl/harness/examples-all-in-one-with-options (39.53s)
--- PASS: kuttl/harness/examples-agent-with-priority-class (41.41s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml
time="2024-12-02T14:02:51Z" level=debug msg="Setting a new name for the test suites"
time="2024-12-02T14:02:51Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-12-02T14:02:51Z" level=debug msg="normalizing test case names"
time="2024-12-02T14:02:51Z" level=debug msg="examples/artifacts -> examples_artifacts"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options"
time="2024-12-02T14:02:51Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class"
+---------------------------------------------------------+--------+
| NAME                                                    | RESULT |
+---------------------------------------------------------+--------+
| examples_artifacts                                      | passed |
| examples_examples_simple_prod                           | passed |
| examples_examples_with_sampling                         | passed |
| examples_examples_with_cassandra                        | passed |
| examples_examples_with_badger                           | passed |
| examples_examples_simplest                              | passed |
| examples_examples_simple_prod_with_volumes              | passed |
| examples_examples_business_application_injected_sidecar | passed |
| examples_examples_service_types                         | passed |
| examples_examples_openshift_with_htpasswd               | passed |
| examples_examples_all_in_one_with_options               | passed |
| examples_examples_agent_with_priority_class             | passed |
+---------------------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=generate
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/generate.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-generate
make[2]: Entering directory '/tmp/jaeger-tests'
test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.32.0/operator-sdk_`go env GOOS`_`go env GOARCH`
./hack/install/install-golangci-lint.sh
Installing golangci-lint
golangci-lint 1.55.2 is installed already
./hack/install/install-goimports.sh
Installing goimports
Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12
>>>> Formatting code... ./.ci/format.sh
>>>> Building... ./hack/install/install-dependencies.sh
Installing go dependencies
Try 0... go mod download
GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.62.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2024-12-02T14:02:53Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.62.0"" -o "bin/jaeger-operator" main.go
JAEGER_VERSION="1.62.0" ./tests/e2e/generate/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 54m Cluster version is 4.18.0-0.nightly-2024-11-30-141716'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 54m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/generate ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + '[' true = true ']' + skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 2 -ne 2 ']' + test_name=generate + message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/generate/_build + '[' _build '!=' _build ']' + rm -rf generate + warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m' WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running generate E2E tests' Running generate E2E tests + cd tests/e2e/generate/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1113221797 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 1 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === CONT kuttl/harness/artifacts logger.go:42: 14:03:12 | artifacts | Creating namespace: kuttl-test-wanted-vervet logger.go:42: 14:03:12 | artifacts | artifacts events from ns kuttl-test-wanted-vervet: logger.go:42: 14:03:12 | artifacts | Deleting namespace: kuttl-test-wanted-vervet === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (6.54s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.33s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml time="2024-12-02T14:03:18Z" level=debug msg="Setting a new name for the test suites" time="2024-12-02T14:03:18Z" level=debug msg="Removing 'artifacts' TestCase" time="2024-12-02T14:03:18Z" level=debug msg="normalizing test case names" time="2024-12-02T14:03:18Z" level=debug msg="generate/artifacts -> generate_artifacts" +--------------------+--------+ | NAME | RESULT | +--------------------+--------+ | generate_artifacts | passed | +--------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true + '[' 3 -ne 3 ']' + test_suite_name=upgrade + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. 
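The generate suite above ends up running only the placeholder artifacts test because skip_test deletes the rendered test directory before kuttl starts; the same mechanism skips the upgrade tests below. A minimal bash sketch of that behavior, reconstructed from the '+ skip_test ...' trace lines (the repository's actual helper may differ in details):

    warning() {
        # Yellow "WAR:" prefix, matching the warnings seen in this log.
        echo -e "\e[1;33mWAR: $1\e[0m"
    }

    skip_test() {
        # Reconstructed sketch: skip_test <test_name> <message>.
        [ $# -ne 2 ] && return 1
        test_name=$1
        message=$2
        # The trace checks that it is running inside the rendered _build directory.
        [ "$(basename "$(pwd)")" != "_build" ] && return 1
        # Removing the directory hides the test from kuttl entirely.
        rm -rf "$test_name"
        warning "$test_name: $message"
    }

For example, skip_test generate '...' removes tests/e2e/generate/_build/generate, so kuttl only discovers the artifacts test.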
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/upgrade.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-upgrade make[2]: Entering directory '/tmp/jaeger-tests' make docker JAEGER_VERSION=1.62.1 IMG="quay.io//jaeger-operator:next" make[3]: Entering directory '/tmp/jaeger-tests' [ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.62.0" --build-arg=JAEGER_VERSION=1.62.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2024-12-02T14:03:19Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" . make[3]: Leaving directory '/tmp/jaeger-tests' touch build-e2e-upgrade-image SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.62.0" JAEGER_OPERATOR_VERSION="1.62.0" JAEGER_VERSION="1.62.0" ./tests/e2e/upgrade/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 54m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 54m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/upgrade/render.sh ++ export SUITE_DIR=./tests/e2e/upgrade ++ SUITE_DIR=./tests/e2e/upgrade ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
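Each render.sh invocation in this log repeats the same preamble: detect OpenShift from 'kubectl get clusterversion', then pick the Kafka template flavor via version_le. A plausible one-line reconstruction of version_le from the '+++' trace (echo, tr, sort -V, head):

    version_le() {
        # Succeeds when $1 <= $2: sort -V orders versions, so the smallest
        # of the pair equals $1 exactly when $1 is the lower (or equal) one.
        test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }

In the trace, version_le 3.6.0 0.25.0 fails (0.25.0 sorts first), so KAFKA_USE_CUSTOM_PODSET=true is exported for this Strimzi version.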
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/upgrade ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + export JAEGER_NAME + '[' true = true ']' + skip_test upgrade 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=upgrade + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build + '[' _build '!=' _build ']' + rm -rf upgrade + warning 'upgrade: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m' WAR: upgrade: Test not supported in OpenShift + '[' true = true ']' + skip_test upgrade-from-latest-release 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=upgrade-from-latest-release + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build + '[' _build '!=' _build ']' + rm -rf upgrade-from-latest-release + warning 'upgrade-from-latest-release: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m' WAR: upgrade-from-latest-release: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running upgrade E2E tests' Running upgrade E2E tests + cd tests/e2e/upgrade/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1113221797 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 1 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === CONT kuttl/harness/artifacts logger.go:42: 14:03:20 | artifacts | Creating namespace: kuttl-test-central-ladybird logger.go:42: 14:03:20 | artifacts | artifacts events from ns kuttl-test-central-ladybird: logger.go:42: 14:03:20 | artifacts | Deleting namespace: kuttl-test-central-ladybird === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (6.12s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.91s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml time="2024-12-02T14:03:26Z" level=debug msg="Setting a new name for the test suites" time="2024-12-02T14:03:26Z" level=debug msg="Removing 'artifacts' TestCase" time="2024-12-02T14:03:26Z" level=debug msg="normalizing test case names" time="2024-12-02T14:03:26Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts" +-------------------+--------+ | NAME | RESULT | +-------------------+--------+ | upgrade_artifacts | passed | +-------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true + '[' 3 -ne 3 ']' + test_suite_name=sidecar + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/sidecar.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-sidecar make[2]: Entering directory '/tmp/jaeger-tests' ./tests/e2e/sidecar/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 55m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 55m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/sidecar/render.sh ++ export SUITE_DIR=./tests/e2e/sidecar ++ SUITE_DIR=./tests/e2e/sidecar ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/sidecar ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + jaeger_service_name=order + start_test sidecar-deployment + '[' 1 -ne 1 ']' + test_name=sidecar-deployment + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-deployment' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m' Rendering files for test sidecar-deployment + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build + '[' _build '!=' _build ']' + mkdir -p sidecar-deployment + cd sidecar-deployment + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-namespace + '[' 1 -ne 1 ']' + test_name=sidecar-namespace + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-namespace' + '[' 1 -ne 1 ']' + 
echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m' Rendering files for test sidecar-namespace + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment + '[' sidecar-deployment '!=' _build ']' + cd .. + mkdir -p sidecar-namespace + cd sidecar-namespace + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-skip-webhook + '[' 1 -ne 1 ']' + test_name=sidecar-skip-webhook + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-skip-webhook' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m' Rendering files for test sidecar-skip-webhook + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace + '[' sidecar-namespace '!=' _build ']' + cd .. 
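render_find_service, traced twice for each sidecar test above, only exports the variables the shared templates consume and then runs gomplate twice. A condensed sketch of the traced allInOne path (the trace shows a branch for other deployment strategies that is not exercised here):

    render_find_service() {
        # Traced arguments: jaeger instance, strategy, service, job number, test step.
        jaeger=$1; deployment_strategy=$2; service_name=$3; job_number=$4; test_step=$5
        export JAEGER_NAME="$jaeger" JOB_NUMBER="$job_number" SERVICE_NAME="$service_name"
        # allInOne exposes the query service on port 16686.
        export JAEGER_QUERY_ENDPOINT="http://$jaeger-query:16686"
        template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template
        /tmp/jaeger-tests/bin/gomplate -f "$template" -o "./$test_step-find-service.yaml"
        /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o "./$test_step-assert.yaml"
        unset JAEGER_NAME SERVICE_NAME JOB_NUMBER
    }

Called exactly as in the trace: render_find_service agent-as-sidecar allInOne order 00 03.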
+ mkdir -p sidecar-skip-webhook + cd sidecar-skip-webhook + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running sidecar E2E tests' Running sidecar E2E tests + cd tests/e2e/sidecar/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1113221797 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/sidecar-deployment === PAUSE kuttl/harness/sidecar-deployment === RUN kuttl/harness/sidecar-namespace === PAUSE kuttl/harness/sidecar-namespace === RUN kuttl/harness/sidecar-skip-webhook === PAUSE kuttl/harness/sidecar-skip-webhook === CONT kuttl/harness/artifacts logger.go:42: 14:03:34 | artifacts | Creating namespace: kuttl-test-enormous-horse logger.go:42: 14:03:34 | artifacts | artifacts events from ns kuttl-test-enormous-horse: logger.go:42: 14:03:34 | artifacts | Deleting namespace: kuttl-test-enormous-horse === CONT kuttl/harness/sidecar-namespace logger.go:42: 14:03:41 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:03:41 | sidecar-namespace | Creating namespace: kuttl-test-wealthy-cattle logger.go:42: 14:03:41 | sidecar-namespace/0-install | starting test step 0-install logger.go:42: 14:03:41 | sidecar-namespace/0-install | Jaeger:kuttl-test-wealthy-cattle/agent-as-sidecar created logger.go:42: 14:03:48 | sidecar-namespace/0-install | test step completed 0-install logger.go:42: 14:03:48 | sidecar-namespace/1-install | starting test step 1-install logger.go:42: 14:03:48 | sidecar-namespace/1-install | Deployment:kuttl-test-wealthy-cattle/vertx-create-span-sidecar created logger.go:42: 14:03:49 | sidecar-namespace/1-install | test step completed 1-install logger.go:42: 14:03:49 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection logger.go:42: 14:03:49 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"] logger.go:42: 14:03:49 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-wealthy-cattle annotated logger.go:42: 14:03:55 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection logger.go:42: 14:03:55 | sidecar-namespace/3-find-service | starting test step 3-find-service logger.go:42: 14:03:55 | sidecar-namespace/3-find-service | Job:kuttl-test-wealthy-cattle/00-find-service created logger.go:42: 14:04:01 | sidecar-namespace/3-find-service | test step completed 3-find-service logger.go:42: 14:04:01 | sidecar-namespace/4-other-instance | starting test step 4-other-instance logger.go:42: 14:04:01 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-wealthy-cattle/agent-as-sidecar2 created logger.go:42: 14:04:08 | sidecar-namespace/4-other-instance | test step 
completed 4-other-instance logger.go:42: 14:04:08 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 14:04:08 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 14:04:08 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 14:04:08 | sidecar-namespace/6-find-service | Job:kuttl-test-wealthy-cattle/01-find-service created logger.go:42: 14:04:29 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 14:04:29 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 14:04:29 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 14:04:29 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-wealthy-cattle annotated logger.go:42: 14:04:34 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 14:04:35 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-wealthy-cattle: logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:45 +0000 UTC Normal Pod agent-as-sidecar-567f964dfd-5mh9k Binding Scheduled Successfully assigned kuttl-test-wealthy-cattle/agent-as-sidecar-567f964dfd-5mh9k to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:45 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-567f964dfd SuccessfulCreate Created pod: agent-as-sidecar-567f964dfd-5mh9k replicaset-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:45 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-567f964dfd to 1 deployment-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:46 +0000 UTC Normal Pod agent-as-sidecar-567f964dfd-5mh9k AddedInterface Add eth0 [10.128.2.63/23] from ovn-kubernetes multus logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:46 +0000 UTC Normal Pod agent-as-sidecar-567f964dfd-5mh9k.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:46 +0000 UTC Normal Pod agent-as-sidecar-567f964dfd-5mh9k.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:46 +0000 UTC Normal Pod agent-as-sidecar-567f964dfd-5mh9k.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:48 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-226pn Binding Scheduled Successfully assigned kuttl-test-wealthy-cattle/vertx-create-span-sidecar-797645c8fc-226pn to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:48 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-797645c8fc SuccessfulCreate Created pod: vertx-create-span-sidecar-797645c8fc-226pn replicaset-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:48 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-797645c8fc to 1 deployment-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:49 +0000 
UTC Normal Pod vertx-create-span-sidecar-797645c8fc-226pn AddedInterface Add eth0 [10.131.0.74/23] from ovn-kubernetes multus logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:49 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-226pn.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:49 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-226pn.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:49 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-226pn.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:50 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl Binding Scheduled Successfully assigned kuttl-test-wealthy-cattle/vertx-create-span-sidecar-7fd767f857-lp7kl to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:50 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl AddedInterface Add eth0 [10.128.2.64/23] from ovn-kubernetes multus logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:50 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:50 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:50 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7fd767f857 SuccessfulCreate Created pod: vertx-create-span-sidecar-7fd767f857-lp7kl replicaset-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:50 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7fd767f857 to 1 deployment-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:51 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:51 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:53 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" in 2.534s (2.534s including waiting). Image size: 115697219 bytes. 
kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:53 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:53 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:55 +0000 UTC Normal Pod 00-find-service-74nlp Binding Scheduled Successfully assigned kuttl-test-wealthy-cattle/00-find-service-74nlp to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:55 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-74nlp job-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:56 +0000 UTC Normal Pod 00-find-service-74nlp AddedInterface Add eth0 [10.129.2.58/23] from ovn-kubernetes multus logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:56 +0000 UTC Normal Pod 00-find-service-74nlp.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:57 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-226pn.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.74:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:57 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-226pn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.74:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:58 +0000 UTC Normal Pod 00-find-service-74nlp.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.798s (2.798s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:58 +0000 UTC Normal Pod 00-find-service-74nlp.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:58 +0000 UTC Warning Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.64:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:58 +0000 UTC Warning Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.64:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:59 +0000 UTC Normal Pod 00-find-service-74nlp.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:59 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-226pn.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:59 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-226pn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.74:8080/": read tcp 10.131.0.2:55198->10.131.0.74:8080: read: connection reset by peer kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:03:59 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-226pn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.74:8080/": dial tcp 10.131.0.74:8080: connect: connection refused kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:00 +0000 UTC Normal Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:00 +0000 UTC Warning Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.64:8080/": read tcp 10.128.2.2:40568->10.128.2.64:8080: read: connection reset by peer kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:00 +0000 UTC Warning Pod vertx-create-span-sidecar-7fd767f857-lp7kl.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.64:8080/": dial tcp 10.128.2.64:8080: connect: connection refused kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:01 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:05 +0000 UTC Normal Pod agent-as-sidecar2-7cfffd456d-t8r67 Binding Scheduled Successfully assigned kuttl-test-wealthy-cattle/agent-as-sidecar2-7cfffd456d-t8r67 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:05 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-7cfffd456d SuccessfulCreate Created pod: agent-as-sidecar2-7cfffd456d-t8r67 replicaset-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:05 +0000 UTC Normal Deployment.apps 
agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-7cfffd456d to 1 deployment-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:06 +0000 UTC Normal Pod agent-as-sidecar2-7cfffd456d-t8r67 AddedInterface Add eth0 [10.131.0.75/23] from ovn-kubernetes multus logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:06 +0000 UTC Normal Pod agent-as-sidecar2-7cfffd456d-t8r67.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:06 +0000 UTC Normal Pod agent-as-sidecar2-7cfffd456d-t8r67.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:06 +0000 UTC Normal Pod agent-as-sidecar2-7cfffd456d-t8r67.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:08 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-vbfjq job-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:08 +0000 UTC Normal Pod agent-as-sidecar-567f964dfd-5mh9k.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:09 +0000 UTC Normal Pod 01-find-service-vbfjq Binding Scheduled Successfully assigned kuttl-test-wealthy-cattle/01-find-service-vbfjq to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:09 +0000 UTC Normal Pod 01-find-service-vbfjq AddedInterface Add eth0 [10.129.2.59/23] from ovn-kubernetes multus logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:09 +0000 UTC Normal Pod 01-find-service-vbfjq.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:10 +0000 UTC Normal Pod 01-find-service-vbfjq.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 1.224s (1.224s including waiting). Image size: 60976023 bytes. 
kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:10 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-226pn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.74:8080/": read tcp 10.131.0.2:33754->10.131.0.74:8080: read: connection reset by peer kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:11 +0000 UTC Normal Pod 01-find-service-vbfjq.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:11 +0000 UTC Normal Pod 01-find-service-vbfjq.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-797645c8fc SuccessfulDelete Deleted pod: vertx-create-span-sidecar-797645c8fc-226pn replicaset-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:13 +0000 UTC Normal Pod vertx-create-span-sidecar-86698f66c5-m957g Binding Scheduled Successfully assigned kuttl-test-wealthy-cattle/vertx-create-span-sidecar-86698f66c5-m957g to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:13 +0000 UTC Normal Pod vertx-create-span-sidecar-86698f66c5-m957g AddedInterface Add eth0 [10.128.2.65/23] from ovn-kubernetes multus logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:13 +0000 UTC Normal Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:13 +0000 UTC Normal Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:13 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-86698f66c5 SuccessfulCreate Created pod: vertx-create-span-sidecar-86698f66c5-m957g replicaset-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-797645c8fc to 0 from 1 deployment-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:13 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-86698f66c5 to 1 from 0 deployment-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:14 +0000 UTC Normal Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:14 +0000 UTC Normal Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:14 +0000 UTC Normal Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:14 +0000 UTC Normal Pod 
vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:21 +0000 UTC Warning Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.65:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:21 +0000 UTC Warning Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.65:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:23 +0000 UTC Normal Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:24 +0000 UTC Warning Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.65:8080/": read tcp 10.128.2.2:34528->10.128.2.65:8080: read: connection reset by peer kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:24 +0000 UTC Warning Pod vertx-create-span-sidecar-86698f66c5-m957g.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.65:8080/": dial tcp 10.128.2.65:8080: connect: connection refused kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:29 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:29 +0000 UTC Normal Pod vertx-create-span-sidecar-5f7b944b48-62b5q Binding Scheduled Successfully assigned kuttl-test-wealthy-cattle/vertx-create-span-sidecar-5f7b944b48-62b5q to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:29 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5f7b944b48 SuccessfulCreate Created pod: vertx-create-span-sidecar-5f7b944b48-62b5q replicaset-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:29 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7fd767f857 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-7fd767f857-lp7kl replicaset-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:29 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-7fd767f857 to 0 from 1 deployment-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:29 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-5f7b944b48 to 1 from 0 deployment-controller logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:30 +0000 UTC Normal Pod vertx-create-span-sidecar-5f7b944b48-62b5q AddedInterface Add eth0 [10.129.2.60/23] from ovn-kubernetes multus logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:30 +0000 UTC Normal Pod vertx-create-span-sidecar-5f7b944b48-62b5q.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:33 +0000 UTC Normal 
Pod vertx-create-span-sidecar-5f7b944b48-62b5q.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.136s (3.136s including waiting). Image size: 282912835 bytes. kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5f7b944b48-62b5q.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 14:04:35 | sidecar-namespace | 2024-12-02 14:04:33 +0000 UTC Normal Pod vertx-create-span-sidecar-5f7b944b48-62b5q.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 14:04:35 | sidecar-namespace | Deleting namespace: kuttl-test-wealthy-cattle === CONT kuttl/harness/sidecar-skip-webhook logger.go:42: 14:04:41 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:04:41 | sidecar-skip-webhook | Creating namespace: kuttl-test-discrete-calf logger.go:42: 14:04:41 | sidecar-skip-webhook/0-install | starting test step 0-install logger.go:42: 14:04:42 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-discrete-calf/agent-as-sidecar created logger.go:42: 14:04:48 | sidecar-skip-webhook/0-install | test step completed 0-install logger.go:42: 14:04:48 | sidecar-skip-webhook/1-install | starting test step 1-install logger.go:42: 14:04:48 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-discrete-calf/vertx-create-span-sidecar created logger.go:42: 14:04:50 | sidecar-skip-webhook/1-install | test step completed 1-install logger.go:42: 14:04:50 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label logger.go:42: 14:04:50 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-discrete-calf] logger.go:42: 14:04:50 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled logger.go:42: 14:04:50 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-discrete-calf] logger.go:42: 14:04:50 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 14:04:50 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label logger.go:42: 14:04:50 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label logger.go:42: 14:04:50 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-discrete-calf] logger.go:42: 14:04:51 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled logger.go:42: 14:04:53 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label logger.go:42: 14:04:53 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-discrete-calf: logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:46 +0000 UTC Normal Pod agent-as-sidecar-7cb7fd6d76-4wwgf Binding Scheduled Successfully assigned kuttl-test-discrete-calf/agent-as-sidecar-7cb7fd6d76-4wwgf to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:04:53 | sidecar-skip-webhook | 
2024-12-02 14:04:46 +0000 UTC Normal Pod agent-as-sidecar-7cb7fd6d76-4wwgf AddedInterface Add eth0 [10.128.2.66/23] from ovn-kubernetes multus logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:46 +0000 UTC Normal Pod agent-as-sidecar-7cb7fd6d76-4wwgf.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:46 +0000 UTC Normal Pod agent-as-sidecar-7cb7fd6d76-4wwgf.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:46 +0000 UTC Normal Pod agent-as-sidecar-7cb7fd6d76-4wwgf.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:46 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-7cb7fd6d76 SuccessfulCreate Created pod: agent-as-sidecar-7cb7fd6d76-4wwgf replicaset-controller logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:46 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-7cb7fd6d76 to 1 deployment-controller logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:48 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-d2g55 Binding Scheduled Successfully assigned kuttl-test-discrete-calf/vertx-create-span-sidecar-797645c8fc-d2g55 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:48 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-797645c8fc SuccessfulCreate Created pod: vertx-create-span-sidecar-797645c8fc-d2g55 replicaset-controller logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:48 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-797645c8fc to 1 deployment-controller logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:49 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-d2g55 AddedInterface Add eth0 [10.131.0.76/23] from ovn-kubernetes multus logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:49 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-d2g55.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:49 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-d2g55.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:49 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-d2g55.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:50 +0000 UTC Normal Pod vertx-create-span-sidecar-74644bb59-hk69f Binding Scheduled Successfully assigned kuttl-test-discrete-calf/vertx-create-span-sidecar-74644bb59-hk69f to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:50 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-74644bb59 SuccessfulCreate Created pod: vertx-create-span-sidecar-74644bb59-hk69f replicaset-controller logger.go:42: 14:04:53 | sidecar-skip-webhook | 
2024-12-02 14:04:50 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-74644bb59 to 1 deployment-controller logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:51 +0000 UTC Normal Pod vertx-create-span-sidecar-74644bb59-hk69f AddedInterface Add eth0 [10.128.2.67/23] from ovn-kubernetes multus logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:51 +0000 UTC Normal Pod vertx-create-span-sidecar-74644bb59-hk69f.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:51 +0000 UTC Normal Pod vertx-create-span-sidecar-74644bb59-hk69f.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:51 +0000 UTC Normal Pod vertx-create-span-sidecar-74644bb59-hk69f.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:51 +0000 UTC Normal Pod vertx-create-span-sidecar-74644bb59-hk69f.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:51 +0000 UTC Normal Pod vertx-create-span-sidecar-74644bb59-hk69f.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | 2024-12-02 14:04:51 +0000 UTC Normal Pod vertx-create-span-sidecar-74644bb59-hk69f.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 14:04:53 | sidecar-skip-webhook | Deleting namespace: kuttl-test-discrete-calf === CONT kuttl/harness/sidecar-deployment logger.go:42: 14:04:59 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:04:59 | sidecar-deployment | Creating namespace: kuttl-test-eternal-bulldog logger.go:42: 14:04:59 | sidecar-deployment/0-install | starting test step 0-install logger.go:42: 14:04:59 | sidecar-deployment/0-install | Jaeger:kuttl-test-eternal-bulldog/agent-as-sidecar created logger.go:42: 14:05:06 | sidecar-deployment/0-install | test step completed 0-install logger.go:42: 14:05:06 | sidecar-deployment/1-install | starting test step 1-install logger.go:42: 14:05:07 | sidecar-deployment/1-install | Deployment:kuttl-test-eternal-bulldog/vertx-create-span-sidecar created logger.go:42: 14:05:08 | sidecar-deployment/1-install | test step completed 1-install logger.go:42: 14:05:08 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection logger.go:42: 14:05:08 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-eternal-bulldog] logger.go:42: 14:05:08 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 14:05:10 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection logger.go:42: 14:05:10 | sidecar-deployment/3-find-service | starting test step 3-find-service logger.go:42: 14:05:10 | sidecar-deployment/3-find-service | 
Job:kuttl-test-eternal-bulldog/00-find-service created logger.go:42: 14:05:25 | sidecar-deployment/3-find-service | test step completed 3-find-service logger.go:42: 14:05:25 | sidecar-deployment/4-other-instance | starting test step 4-other-instance logger.go:42: 14:05:25 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-eternal-bulldog/agent-as-sidecar2 created logger.go:42: 14:05:32 | sidecar-deployment/4-other-instance | test step completed 4-other-instance logger.go:42: 14:05:32 | sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 14:05:32 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 14:05:32 | sidecar-deployment/6-find-service | starting test step 6-find-service logger.go:42: 14:05:32 | sidecar-deployment/6-find-service | Job:kuttl-test-eternal-bulldog/01-find-service created logger.go:42: 14:05:52 | sidecar-deployment/6-find-service | test step completed 6-find-service logger.go:42: 14:05:52 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection logger.go:42: 14:05:52 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-eternal-bulldog] logger.go:42: 14:05:52 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotated logger.go:42: 14:05:55 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection logger.go:42: 14:05:55 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-eternal-bulldog: logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:03 +0000 UTC Normal Pod agent-as-sidecar-65d954d79c-w7xcb Binding Scheduled Successfully assigned kuttl-test-eternal-bulldog/agent-as-sidecar-65d954d79c-w7xcb to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:03 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-65d954d79c SuccessfulCreate Created pod: agent-as-sidecar-65d954d79c-w7xcb replicaset-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:03 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-65d954d79c to 1 deployment-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:04 +0000 UTC Normal Pod agent-as-sidecar-65d954d79c-w7xcb AddedInterface Add eth0 [10.128.2.68/23] from ovn-kubernetes multus logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:04 +0000 UTC Normal Pod agent-as-sidecar-65d954d79c-w7xcb.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:04 +0000 UTC Normal Pod agent-as-sidecar-65d954d79c-w7xcb.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:04 +0000 UTC Normal Pod agent-as-sidecar-65d954d79c-w7xcb.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:07 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-xb9zh Binding Scheduled Successfully assigned kuttl-test-eternal-bulldog/vertx-create-span-sidecar-797645c8fc-xb9zh to ip-10-0-123-161.ec2.internal default-scheduler 
logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:07 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-xb9zh AddedInterface Add eth0 [10.131.0.77/23] from ovn-kubernetes multus logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:07 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-xb9zh.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:07 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-xb9zh.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:07 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-xb9zh.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:07 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-797645c8fc SuccessfulCreate Created pod: vertx-create-span-sidecar-797645c8fc-xb9zh replicaset-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:07 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-797645c8fc to 1 deployment-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:08 +0000 UTC Normal Pod vertx-create-span-sidecar-bc94dbdcd-rbh65 Binding Scheduled Successfully assigned kuttl-test-eternal-bulldog/vertx-create-span-sidecar-bc94dbdcd-rbh65 to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:08 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-bc94dbdcd SuccessfulCreate Created pod: vertx-create-span-sidecar-bc94dbdcd-rbh65 replicaset-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:08 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-bc94dbdcd to 1 deployment-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:09 +0000 UTC Normal Pod vertx-create-span-sidecar-bc94dbdcd-rbh65 AddedInterface Add eth0 [10.128.2.69/23] from ovn-kubernetes multus logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:09 +0000 UTC Normal Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:09 +0000 UTC Normal Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:09 +0000 UTC Normal Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:09 +0000 UTC Normal Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:09 +0000 UTC Normal Pod 
vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:09 +0000 UTC Normal Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:10 +0000 UTC Normal Pod 00-find-service-fqwzw Binding Scheduled Successfully assigned kuttl-test-eternal-bulldog/00-find-service-fqwzw to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:10 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-fqwzw job-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:11 +0000 UTC Normal Pod 00-find-service-fqwzw AddedInterface Add eth0 [10.129.2.61/23] from ovn-kubernetes multus logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:11 +0000 UTC Normal Pod 00-find-service-fqwzw.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:15 +0000 UTC Normal Pod 00-find-service-fqwzw.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 3.826s (3.826s including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:15 +0000 UTC Normal Pod 00-find-service-fqwzw.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:15 +0000 UTC Normal Pod 00-find-service-fqwzw.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:15 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-xb9zh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.77:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:15 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-xb9zh.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.131.0.77:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:17 +0000 UTC Normal Pod vertx-create-span-sidecar-797645c8fc-xb9zh.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:17 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-xb9zh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.77:8080/": read tcp 10.131.0.2:34208->10.131.0.77:8080: read: connection reset by peer kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:17 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-xb9zh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.77:8080/": dial tcp 10.131.0.77:8080: connect: connection refused kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:17 +0000 UTC Warning Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{vertx-create-span-sidecar} 
Unhealthy Readiness probe failed: Get "http://10.128.2.69:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:17 +0000 UTC Warning Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.69:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:19 +0000 UTC Normal Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:20 +0000 UTC Warning Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.69:8080/": read tcp 10.128.2.2:58374->10.128.2.69:8080: read: connection reset by peer kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:20 +0000 UTC Warning Pod vertx-create-span-sidecar-bc94dbdcd-rbh65.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.69:8080/": dial tcp 10.128.2.69:8080: connect: connection refused kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:25 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:28 +0000 UTC Warning Pod vertx-create-span-sidecar-797645c8fc-xb9zh.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.131.0.77:8080/": read tcp 10.131.0.2:43820->10.131.0.77:8080: read: connection reset by peer kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:30 +0000 UTC Normal Pod agent-as-sidecar2-85cd67ff85-mcl6n Binding Scheduled Successfully assigned kuttl-test-eternal-bulldog/agent-as-sidecar2-85cd67ff85-mcl6n to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:30 +0000 UTC Normal Pod agent-as-sidecar2-85cd67ff85-mcl6n AddedInterface Add eth0 [10.131.0.78/23] from ovn-kubernetes multus logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:30 +0000 UTC Normal Pod agent-as-sidecar2-85cd67ff85-mcl6n.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:30 +0000 UTC Normal Pod agent-as-sidecar2-85cd67ff85-mcl6n.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:30 +0000 UTC Normal Pod agent-as-sidecar2-85cd67ff85-mcl6n.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:30 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-85cd67ff85 SuccessfulCreate Created pod: agent-as-sidecar2-85cd67ff85-mcl6n replicaset-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:30 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-85cd67ff85 to 1 deployment-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:32 +0000 UTC Normal Pod 01-find-service-7m2fc Binding Scheduled 
Successfully assigned kuttl-test-eternal-bulldog/01-find-service-7m2fc to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:32 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-7m2fc job-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:32 +0000 UTC Normal Pod agent-as-sidecar-65d954d79c-w7xcb.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal Pod 01-find-service-7m2fc AddedInterface Add eth0 [10.129.2.62/23] from ovn-kubernetes multus logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal Pod 01-find-service-7m2fc.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal Pod 01-find-service-7m2fc.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 785ms (785ms including waiting). Image size: 60976023 bytes. kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal Pod vertx-create-span-sidecar-645794bb76-jvm7f Binding Scheduled Successfully assigned kuttl-test-eternal-bulldog/vertx-create-span-sidecar-645794bb76-jvm7f to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal Pod vertx-create-span-sidecar-645794bb76-jvm7f AddedInterface Add eth0 [10.128.2.70/23] from ovn-kubernetes multus logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-645794bb76 SuccessfulCreate Created pod: vertx-create-span-sidecar-645794bb76-jvm7f replicaset-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-797645c8fc SuccessfulDelete Deleted pod: vertx-create-span-sidecar-797645c8fc-xb9zh replicaset-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-797645c8fc to 0 from 1 deployment-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:33 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-645794bb76 to 1 from 0 deployment-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:34 +0000 UTC Normal Pod 01-find-service-7m2fc.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:34 +0000 UTC Normal Pod 01-find-service-7m2fc.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 14:05:55 | 
sidecar-deployment | 2024-12-02 14:05:34 +0000 UTC Normal Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:34 +0000 UTC Normal Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:34 +0000 UTC Normal Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:34 +0000 UTC Normal Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:42 +0000 UTC Warning Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.70:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:42 +0000 UTC Warning Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.128.2.70:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:44 +0000 UTC Normal Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:45 +0000 UTC Warning Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.70:8080/": read tcp 10.128.2.2:42250->10.128.2.70:8080: read: connection reset by peer kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:45 +0000 UTC Warning Pod vertx-create-span-sidecar-645794bb76-jvm7f.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.128.2.70:8080/": dial tcp 10.128.2.70:8080: connect: connection refused kubelet logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:52 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:52 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-bc94dbdcd SuccessfulDelete Deleted pod: vertx-create-span-sidecar-bc94dbdcd-rbh65 replicaset-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:52 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-bc94dbdcd to 0 from 1 deployment-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:52 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-c5cd95f4f to 1 from 0 deployment-controller logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:53 +0000 UTC Normal Pod vertx-create-span-sidecar-c5cd95f4f-jg59m Binding Scheduled Successfully assigned 
kuttl-test-eternal-bulldog/vertx-create-span-sidecar-c5cd95f4f-jg59m to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:53 +0000 UTC Normal Pod vertx-create-span-sidecar-c5cd95f4f-jg59m AddedInterface Add eth0 [10.129.2.63/23] from ovn-kubernetes multus
logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:53 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-c5cd95f4f SuccessfulCreate Created pod: vertx-create-span-sidecar-c5cd95f4f-jg59m replicaset-controller
logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:54 +0000 UTC Normal Pod vertx-create-span-sidecar-c5cd95f4f-jg59m.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:54 +0000 UTC Normal Pod vertx-create-span-sidecar-c5cd95f4f-jg59m.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 14:05:55 | sidecar-deployment | 2024-12-02 14:05:54 +0000 UTC Normal Pod vertx-create-span-sidecar-c5cd95f4f-jg59m.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 14:05:55 | sidecar-deployment | Deleting namespace: kuttl-test-eternal-bulldog
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- PASS: kuttl (147.84s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (6.34s)
        --- PASS: kuttl/harness/sidecar-namespace (60.52s)
        --- PASS: kuttl/harness/sidecar-skip-webhook (17.78s)
        --- PASS: kuttl/harness/sidecar-deployment (63.03s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml
time="2024-12-02T14:06:02Z" level=debug msg="Setting a new name for the test suites"
time="2024-12-02T14:06:02Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-12-02T14:06:02Z" level=debug msg="normalizing test case names"
time="2024-12-02T14:06:02Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts"
time="2024-12-02T14:06:02Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace"
time="2024-12-02T14:06:02Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook"
time="2024-12-02T14:06:02Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment"
+------------------------------+--------+
|             NAME             | RESULT |
+------------------------------+--------+
| sidecar_artifacts            | passed |
| sidecar_sidecar_namespace    | passed |
| sidecar_sidecar_skip_webhook | passed |
| sidecar_sidecar_deployment   | passed |
+------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=streaming
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
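Stepping back for a moment before the streaming output begins: everything the three sidecar tests above assert hinges on one annotation. The operator's webhook injects a jaeger-agent container into any Deployment annotated sidecar.jaegertracing.io/inject=true (the sidecar-namespace variant exercises the same annotation at the namespace level), removes it again when the annotation is flipped to false, and skips injection entirely when the Deployment is labeled as the operator itself. A minimal sketch of that round trip with the Deployment name used above (the namespace here is hypothetical):

    # enable injection: the operator rolls the Deployment to a new
    # ReplicaSet whose pods carry an extra jaeger-agent container
    kubectl annotate --overwrite deployment vertx-create-span-sidecar \
      sidecar.jaegertracing.io/inject=true -n my-namespace

    # disable injection: the agent container is rolled away again
    kubectl annotate --overwrite deployment vertx-create-span-sidecar \
      sidecar.jaegertracing.io/inject=false -n my-namespace

    # skip-webhook case: a workload labeled as the operator itself is
    # never mutated, even if the inject annotation is present
    kubectl label deployment vertx-create-span-sidecar \
      app.kubernetes.io/name=jaeger-operator -n my-namespace

The recurring Liveness/Readiness probe warnings on the vertx-create-span pods are churn from the sample app being restarted around these rollouts, not assertion failures; all four test cases report passed.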
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/streaming.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-streaming make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=3.6.0 \ SKIP_KAFKA=false \ SKIP_ES_EXTERNAL=true \ ./tests/e2e/streaming/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 57m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 57m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/streaming/render.sh ++ export SUITE_DIR=./tests/e2e/streaming ++ SUITE_DIR=./tests/e2e/streaming ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/streaming ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + '[' false = true ']' + start_test streaming-simple + '[' 1 -ne 1 ']' + test_name=streaming-simple + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-simple' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-simple\e[0m' Rendering files for test streaming-simple + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + mkdir -p streaming-simple + cd streaming-simple + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./01-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + 
deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml + render_smoke_test simple-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=simple-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268 + export JAEGER_NAME=simple-streaming + JAEGER_NAME=simple-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-tls + '[' 1 -ne 1 ']' + test_name=streaming-with-tls + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-tls' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m' Rendering files for test streaming-with-tls + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple + '[' streaming-simple '!=' _build ']' + cd .. 
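The render_smoke_test trace just above is the entire contract between the generator and the smoke-test template: three endpoint variables in, two kuttl steps out. Condensed from the trace (binary and template paths shortened for readability):

    export JAEGER_NAME=simple-streaming
    export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443
    export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268
    gomplate -f tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml
    gomplate -f tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml
    unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT

Because is_secured=true on OpenShift, the query side goes through the https route on :443, while spans are still submitted to the collector's headless service on :14268.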
+ mkdir -p streaming-with-tls + cd streaming-with-tls + render_install_kafka my-cluster 00 + '[' 2 -ne 2 ']' + cluster_name=my-cluster + test_step=00 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml + render_assert_kafka false my-cluster 00 + '[' 3 -ne 3 ']' + autoprovisioned=false + cluster_name=my-cluster + test_step=00 + '[' false = true ']' + '[' false = true ']' + '[' false = false ']' + replicas=1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml ++ expr 00 + 1 + CLUSTER_NAME=my-cluster + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./01-assert.yaml ++ expr 00 + 2 + CLUSTER_NAME=my-cluster + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml + render_install_elasticsearch upstream 03 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=03 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml + render_smoke_test tls-streaming true 05 + '[' 3 -ne 3 ']' + jaeger=tls-streaming + is_secured=true + test_step=05 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268 + export JAEGER_NAME=tls-streaming + JAEGER_NAME=tls-streaming + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. 
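render_assert_kafka, traced in both tests above, fans one Kafka install step out into three numbered asserts: the ZooKeeper cluster, the Kafka cluster, and the entity operator, with each assert's index computed by expr from the install step. A sketch of the expansion for render_assert_kafka false my-cluster 00, assuming CLUSTER_NAME and REPLICAS are exported for gomplate as in the surrounding trace:

    export CLUSTER_NAME=my-cluster REPLICAS=1
    gomplate -f tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml
    gomplate -f tests/templates/assert-kafka-cluster.yaml.template -o ./01-assert.yaml    # expr 00 + 1
    gomplate -f tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml  # expr 00 + 2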
+ mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./04-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + version_lt 1.30 1.23 ++ echo 1.30 1.23 ++ tr ' ' '\n' ++ sort -rV ++ head -n 1 + test 1.30 '!=' 1.30 + rm ./08-assert.yaml + skip_test streaming-with-tls 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=streaming-with-tls + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-autoprovisioning-autoscale + '[' streaming-with-autoprovisioning-autoscale '!=' _build ']' + cd .. 
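The five yq edits near the top of this test's rendering tune the pre-rendered Jaeger CR for the autoscale scenario; collapsed into one expression they are equivalent to the following sketch (the original applies them one at a time):

    /tmp/jaeger-tests/bin/yq e -i '
      .spec.ingester.resources.requests.memory = "20Mi" |
      .spec.ingester.resources.requests.memory = "500m" |
      .spec.ingester.autoscale = true |
      .spec.ingester.minReplicas = 1 |
      .spec.ingester.maxReplicas = 2
    ' ./02-install.yaml

The second write overwrites the first, leaving requests.memory at "500m", which Kubernetes parses as half a byte; that reads like a CPU-style quantity that strayed into a memory field, reproduced here exactly as the test renders it. The is_kafka_minimal_enabled loop that follows simply probes three candidate namespaces for a jaeger-operator pod whose KAFKA-PROVISIONING-MINIMAL env var is true, finding it in openshift-distributed-tracing.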
+ rm -rf streaming-with-tls + warning 'streaming-with-tls: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: streaming-with-tls: This test is flaky in Prow CI\e[0m' WAR: streaming-with-tls: This test is flaky in Prow CI + skip_test streaming-simple 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=streaming-simple + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + rm -rf streaming-simple + warning 'streaming-simple: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: streaming-simple: This test is flaky in Prow CI\e[0m' WAR: streaming-simple: This test is flaky in Prow CI make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1113221797 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 2 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === CONT kuttl/harness/artifacts logger.go:42: 14:06:15 | artifacts | Creating namespace: kuttl-test-positive-gecko logger.go:42: 14:06:15 | artifacts | artifacts events from ns kuttl-test-positive-gecko: logger.go:42: 14:06:15 | artifacts | Deleting namespace: kuttl-test-positive-gecko === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-direct-anemone logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 14:06:21 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 14:06:27 | streaming-with-autoprovisioning-autoscale/1-install | 
running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 14:06:27 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 14:06:27 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 14:06:30 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 14:06:31 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 14:06:48 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 1-install logger.go:42: 14:06:48 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install logger.go:42: 14:06:48 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-direct-anemone/auto-provisioned created logger.go:42: 14:06:48 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install logger.go:42: 14:06:48 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3- logger.go:42: 14:07:35 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3- logger.go:42: 14:07:35 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4- logger.go:42: 14:08:15 | streaming-with-autoprovisioning-autoscale/4- | test step completed 4- logger.go:42: 14:08:15 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5- logger.go:42: 14:08:38 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5- logger.go:42: 14:08:38 | streaming-with-autoprovisioning-autoscale/6- | starting test step 6- logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale/6- | test step completed 6- logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7- logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale/7- | test step completed 7- logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-direct-anemone: logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:27 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-direct-anemone/elasticsearch-0 to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:27 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:28 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.128.2.71/23] from ovn-kubernetes multus logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:28 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:35 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 7.18s (7.18s including waiting). Image size: 758467647 bytes. 
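kuttl derives the step sequence from the digit prefix in each file name, which is why the harness reports README.md and the elasticsearch_*.yml helpers as ignored: only files matching ^(\d+)-name(.yaml)? become steps, and NN-assert.yaml files are waited on rather than applied. The "running command: [sh -c ...]" lines come from TestStep command hooks; a sketch of the step that would produce the 1-install output above, reconstructed from the log rather than copied from the repo (the remaining sleep/apply commands follow the same shape):

    cat > 01-install.yaml <<'EOF'
    apiVersion: kuttl.dev/v1beta1
    kind: TestStep
    commands:
      # $NAMESPACE is injected by kuttl for each test run
      - script: oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true
      - script: oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true
      - script: sleep 6
      - script: kubectl apply -f elasticsearch_0.yml -n $NAMESPACE
    EOF
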
kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:35 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:35 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:43 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.128.2.71:9200/": dial tcp 10.128.2.71:9200: connect: connection refused kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:55 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:55 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:56 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-direct-anemone/data-auto-provisioned-zookeeper-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7df64c598f-fzptl_da3cf643-528b-4f7c-9dac-eb4f20fff859 logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:56 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:58 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-8af2e429-caef-4f47-9177-d2b94c34aea8 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7df64c598f-fzptl_da3cf643-528b-4f7c-9dac-eb4f20fff859 logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:06:59 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-direct-anemone/auto-provisioned-zookeeper-0 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:02 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8af2e429-caef-4f47-9177-d2b94c34aea8" attachdetach-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:05 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.131.0.79/23] from ovn-kubernetes multus logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:05 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-38-rhel9@sha256:c9a60d03827466ee37dd4aff6803eda2c1d65cb2508cb57d13e1d47f982e5a20" kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:18 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-38-rhel9@sha256:c9a60d03827466ee37dd4aff6803eda2c1d65cb2508cb57d13e1d47f982e5a20" in 12.946s (12.946s including waiting). Image size: 616797249 bytes. kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:18 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:18 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:37 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:38 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:38 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:38 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-direct-anemone/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7df64c598f-fzptl_da3cf643-528b-4f7c-9dac-eb4f20fff859 logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:41 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-e2001873-a2e3-4206-976c-bb84e3b73190 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7df64c598f-fzptl_da3cf643-528b-4f7c-9dac-eb4f20fff859 logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:42 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-direct-anemone/auto-provisioned-kafka-0 to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:44 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-e2001873-a2e3-4206-976c-bb84e3b73190" attachdetach-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:45 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.128.2.72/23] from ovn-kubernetes multus logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:45 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-38-rhel9@sha256:c9a60d03827466ee37dd4aff6803eda2c1d65cb2508cb57d13e1d47f982e5a20" kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:57 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-38-rhel9@sha256:c9a60d03827466ee37dd4aff6803eda2c1d65cb2508cb57d13e1d47f982e5a20" in 11.579s (11.579s including waiting). Image size: 616797249 bytes. 
kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:57 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:07:57 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:17 +0000 UTC Normal Pod auto-provisioned-entity-operator-68c47dd5f-tccmh Binding Scheduled Successfully assigned kuttl-test-direct-anemone/auto-provisioned-entity-operator-68c47dd5f-tccmh to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:17 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-68c47dd5f SuccessfulCreate Created pod: auto-provisioned-entity-operator-68c47dd5f-tccmh replicaset-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:17 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-68c47dd5f to 1 deployment-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:18 +0000 UTC Normal Pod auto-provisioned-entity-operator-68c47dd5f-tccmh AddedInterface Add eth0 [10.128.2.73/23] from ovn-kubernetes multus logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:18 +0000 UTC Normal Pod auto-provisioned-entity-operator-68c47dd5f-tccmh.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel9-operator@sha256:07a9540a8b906c6d52e6a9684cfb838aadf9849cc24b1d80218ea3ad5545cb5a" already present on machine kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:18 +0000 UTC Normal Pod auto-provisioned-entity-operator-68c47dd5f-tccmh.spec.containers{topic-operator} Created Created container topic-operator kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:18 +0000 UTC Normal Pod auto-provisioned-entity-operator-68c47dd5f-tccmh.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:18 +0000 UTC Normal Pod auto-provisioned-entity-operator-68c47dd5f-tccmh.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel9-operator@sha256:07a9540a8b906c6d52e6a9684cfb838aadf9849cc24b1d80218ea3ad5545cb5a" already present on machine kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:18 +0000 UTC Normal Pod auto-provisioned-entity-operator-68c47dd5f-tccmh.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:18 +0000 UTC Normal Pod auto-provisioned-entity-operator-68c47dd5f-tccmh.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:40 +0000 UTC Normal Pod auto-provisioned-collector-5bd6d4c884-78lv6 Binding Scheduled Successfully assigned kuttl-test-direct-anemone/auto-provisioned-collector-5bd6d4c884-78lv6 to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 14:09:01 | 
streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:40 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-5bd6d4c884 SuccessfulCreate Created pod: auto-provisioned-collector-5bd6d4c884-78lv6 replicaset-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:40 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-5bd6d4c884 to 1 deployment-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:40 +0000 UTC Normal Pod auto-provisioned-ingester-845cf549b5-tkn7t Binding Scheduled Successfully assigned kuttl-test-direct-anemone/auto-provisioned-ingester-845cf549b5-tkn7t to ip-10-0-19-23.ec2.internal default-scheduler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:40 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-845cf549b5 SuccessfulCreate Created pod: auto-provisioned-ingester-845cf549b5-tkn7t replicaset-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:40 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-845cf549b5 to 1 deployment-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:40 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4 Binding Scheduled Successfully assigned kuttl-test-direct-anemone/auto-provisioned-query-586d4487b7-q86t4 to ip-10-0-123-161.ec2.internal default-scheduler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:40 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-586d4487b7 SuccessfulCreate Created pod: auto-provisioned-query-586d4487b7-q86t4 replicaset-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:40 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-586d4487b7 to 1 deployment-controller logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Warning Pod auto-provisioned-collector-5bd6d4c884-78lv6 FailedMount MountVolume.SetUp failed for volume "auto-provisioned-collector-tls-config-volume" : secret "auto-provisioned-collector-headless-tls" not found kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-ingester-845cf549b5-tkn7t AddedInterface Add eth0 [10.129.2.65/23] from ovn-kubernetes multus logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-ingester-845cf549b5-tkn7t.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:e01a99dddffe65385d77dd6692558aa9a47df36f8b2a8d141b6ad561139981b2" kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4 AddedInterface Add eth0 [10.131.0.80/23] from ovn-kubernetes multus logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet 
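The FailedMount warning for auto-provisioned-collector-tls-config-volume is a transient ordering race on OpenShift: the Deployment references a serving-cert secret that the service CA has not minted yet. Such secrets are generated from an annotation on the owning Service, so the kubelet's mount retry succeeds once the CA catches up. A sketch of the mechanism; the annotation name is the standard OpenShift one, and the Service/secret pairing here is inferred from the event, not shown in the log:

    # the service CA watches for this annotation and creates the named secret
    # containing a certificate for the Service's DNS name:
    kubectl annotate service auto-provisioned-collector-headless -n $NAMESPACE \
      service.beta.openshift.io/serving-cert-secret-name=auto-provisioned-collector-headless-tls
    # kubelet retries MountVolume.SetUp until the secret exists, so the pod
    # eventually starts without manual intervention.
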
logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:41 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:42 +0000 UTC Normal Pod auto-provisioned-collector-5bd6d4c884-78lv6 AddedInterface Add eth0 [10.129.2.64/23] from ovn-kubernetes multus logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:42 +0000 UTC Normal Pod auto-provisioned-collector-5bd6d4c884-78lv6.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:42 +0000 UTC Normal Pod auto-provisioned-collector-5bd6d4c884-78lv6.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:42 +0000 UTC Normal Pod auto-provisioned-collector-5bd6d4c884-78lv6.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:42 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:42 +0000 UTC Normal Pod auto-provisioned-query-586d4487b7-q86t4.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 
2024-12-02 14:08:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod auto-provisioned-collector-5bd6d4c884-78lv6 horizontal-pod-autoscaler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:56 +0000 UTC Normal Pod auto-provisioned-ingester-845cf549b5-tkn7t.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:e01a99dddffe65385d77dd6692558aa9a47df36f8b2a8d141b6ad561139981b2" in 14.39s (14.39s including waiting). Image size: 139765359 bytes. kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:56 +0000 UTC Normal Pod auto-provisioned-ingester-845cf549b5-tkn7t.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:56 +0000 UTC Normal Pod auto-provisioned-ingester-845cf549b5-tkn7t.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:56 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling auto-provisioned-ingester FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 14:09:01 | streaming-with-autoprovisioning-autoscale | 2024-12-02 14:08:57 +0000 UTC Warning Pod auto-provisioned-ingester-845cf549b5-tkn7t.spec.containers{jaeger-ingester} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 14:09:02 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-direct-anemone === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (209.06s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (5.98s) --- PASS: kuttl/harness/streaming-with-autoprovisioning-autoscale (202.89s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name streaming 
--report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml time="2024-12-02T14:09:44Z" level=debug msg="Setting a new name for the test suites" time="2024-12-02T14:09:44Z" level=debug msg="Removing 'artifacts' TestCase" time="2024-12-02T14:09:44Z" level=debug msg="normalizing test case names" time="2024-12-02T14:09:44Z" level=debug msg="streaming/artifacts -> streaming_artifacts" time="2024-12-02T14:09:44Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale" +-----------------------------------------------------+--------+ | NAME | RESULT | +-----------------------------------------------------+--------+ | streaming_artifacts | passed | | streaming_streaming_with_autoprovisioning_autoscale | passed | +-----------------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true + '[' 3 -ne 3 ']' + test_suite_name=ui + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/ui.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-ui make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 61m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ++ IS_OPENSHIFT=false ++ '[' '!' 
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.18.0-0.nightly-2024-11-30-141716 True False 61m Cluster version is 4.18.0-0.nightly-2024-11-30-141716' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/ui/render.sh ++ export SUITE_DIR=./tests/e2e/ui ++ SUITE_DIR=./tests/e2e/ui ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/ui ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test allinone + '[' 1 -ne 1 ']' + test_name=allinone + echo =========================================================================== =========================================================================== + info 'Rendering files for test allinone' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test allinone\e[0m' Rendering files for test allinone + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build + '[' _build '!=' _build ']' + mkdir -p allinone + cd allinone + export GET_URL_COMMAND + export URL + export JAEGER_NAME=all-in-one-ui + JAEGER_NAME=all-in-one-ui + '[' true = true ']' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml + start_test production + '[' 1 -ne 1 ']' + test_name=production + echo =========================================================================== =========================================================================== + info 'Rendering files for test production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test production\e[0m' Rendering files for test production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone + '[' allinone '!=' _build ']' + cd .. 
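For the UI tests the asserted URL is built from the OpenShift Route rather than a fixed hostname, and the rendered ensure-ingress-host.sh simply retries the jsonpath lookup until the Route reports a host (the "array index out of bounds" retries later in this log are that loop at work, since the jsonpath fails while .items is still empty). A minimal sketch of the same behavior, assuming the real template also caps the retries:

    get_host() {
      kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE"
    }
    try=0
    echo "Checking the Ingress host value was populated"
    until HOST=$(get_host) && [ -n "$HOST" ]; do
      echo "Try number $try"
      try=$((try + 1))
      sleep 10
    done
    echo "Hostname is $HOST"
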
+ mkdir -p production + cd production + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + [[ true = true ]] + [[ true = true ]] + render_install_jaeger production-ui production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + '[' true = true ']' + INSECURE=true + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml + INSECURE=true + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml + ASSERT_PRESENT=false + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running ui E2E tests' Running ui E2E tests + cd tests/e2e/ui/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1113221797 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com:6443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 3 tests === RUN kuttl/harness === RUN kuttl/harness/allinone === PAUSE kuttl/harness/allinone === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/production === PAUSE kuttl/harness/production === CONT kuttl/harness/allinone logger.go:42: 14:09:50 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:09:50 | allinone | Creating namespace: kuttl-test-noble-mastiff logger.go:42: 14:09:50 | allinone/0-install | starting test step 0-install logger.go:42: 14:09:51 | allinone/0-install | Jaeger:kuttl-test-noble-mastiff/all-in-one-ui created logger.go:42: 14:09:55 | allinone/0-install | test step completed 0-install logger.go:42: 14:09:55 | allinone/1-curl | starting test step 1-curl logger.go:42: 14:09:55 | allinone/1-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 14:09:55 | allinone/1-curl | Checking the Ingress host value was populated logger.go:42: 14:09:55 | allinone/1-curl | Try number 0 logger.go:42: 14:09:55 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-noble-mastiff.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com logger.go:42: 14:09:55 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui] logger.go:42: 14:09:55 | allinone/1-curl | Checking an expected HTTP response logger.go:42: 14:09:55 | allinone/1-curl | Running in OpenShift logger.go:42: 14:09:55 | allinone/1-curl | User not provided. Getting the token... logger.go:42: 14:09:56 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 14:10:03 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-noble-mastiff.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search logger.go:42: 14:10:03 | allinone/1-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 14:10:03 | allinone/1-curl | Try number 2/30 the https://all-in-one-ui-kuttl-test-noble-mastiff.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search logger.go:42: 14:10:24 | allinone/1-curl | HTTP response is 503. 200 expected. 
Waiting 10 s logger.go:42: 14:10:34 | allinone/1-curl | Try number 3/30 the https://all-in-one-ui-kuttl-test-noble-mastiff.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search logger.go:42: 14:10:34 | allinone/1-curl | curl response asserted properly logger.go:42: 14:10:34 | allinone/1-curl | test step completed 1-curl logger.go:42: 14:10:34 | allinone/2-delete | starting test step 2-delete logger.go:42: 14:10:34 | allinone/2-delete | Jaeger:kuttl-test-noble-mastiff/all-in-one-ui created logger.go:42: 14:10:34 | allinone/2-delete | test step completed 2-delete logger.go:42: 14:10:34 | allinone/3-install | starting test step 3-install logger.go:42: 14:10:34 | allinone/3-install | Jaeger:kuttl-test-noble-mastiff/all-in-one-ui updated logger.go:42: 14:10:34 | allinone/3-install | test step completed 3-install logger.go:42: 14:10:34 | allinone/4-test-ui-config | starting test step 4-test-ui-config logger.go:42: 14:10:34 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh] logger.go:42: 14:10:34 | allinone/4-test-ui-config | Checking the Ingress host value was populated logger.go:42: 14:10:34 | allinone/4-test-ui-config | Try number 0 logger.go:42: 14:10:34 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 14:10:34 | allinone/4-test-ui-config | template was: logger.go:42: 14:10:34 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host} logger.go:42: 14:10:34 | allinone/4-test-ui-config | object given to jsonpath engine was: logger.go:42: 14:10:34 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 14:10:34 | allinone/4-test-ui-config | logger.go:42: 14:10:34 | allinone/4-test-ui-config | logger.go:42: 14:10:44 | allinone/4-test-ui-config | Try number 1 logger.go:42: 14:10:44 | allinone/4-test-ui-config | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. 
Printing more information for debugging the template: logger.go:42: 14:10:44 | allinone/4-test-ui-config | template was: logger.go:42: 14:10:44 | allinone/4-test-ui-config | {.items[0].status.ingress[0].host} logger.go:42: 14:10:44 | allinone/4-test-ui-config | object given to jsonpath engine was: logger.go:42: 14:10:44 | allinone/4-test-ui-config | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 14:10:44 | allinone/4-test-ui-config | logger.go:42: 14:10:44 | allinone/4-test-ui-config | logger.go:42: 14:10:54 | allinone/4-test-ui-config | Try number 2 logger.go:42: 14:10:54 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-noble-mastiff.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com logger.go:42: 14:10:54 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 14:10:55 | allinone/4-test-ui-config | time="2024-12-02T14:10:55Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-noble-mastiff.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search..." logger.go:42: 14:10:55 | allinone/4-test-ui-config | time="2024-12-02T14:10:55Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 14:10:55 | allinone/4-test-ui-config | time="2024-12-02T14:10:55Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-noble-mastiff.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search" logger.go:42: 14:10:55 | allinone/4-test-ui-config | time="2024-12-02T14:10:55Z" level=info msg="Doing request number 0" logger.go:42: 14:10:55 | allinone/4-test-ui-config | time="2024-12-02T14:10:55Z" level=info msg="Content found and asserted!" logger.go:42: 14:10:55 | allinone/4-test-ui-config | time="2024-12-02T14:10:55Z" level=info msg="Success!" 
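Step 3-install enables a tracking ID in the UI options, and 4-test-ui-config then asserts that the served /search page actually contains it. The checker is driven entirely by environment variables, as its command line shows; the CR fragment below is a sketch based on the Jaeger UI's tracking.gaID option, not copied from the test files:

    # assumed Jaeger CR fragment applied by 3-install:
    #   spec:
    #     ui:
    #       options:
    #         tracking:
    #           gaID: MyTrackingId
    ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId \
      QUERY_HOSTNAME="https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search" \
      go run ../../../../cmd-utils/uiconfig/main.go
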
logger.go:42: 14:10:55 | allinone/4-test-ui-config | test step completed 4-test-ui-config logger.go:42: 14:10:55 | allinone | allinone events from ns kuttl-test-noble-mastiff: logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:55 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67 Binding Scheduled Successfully assigned kuttl-test-noble-mastiff/all-in-one-ui-787f8fb495-ncr67 to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:55 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67 AddedInterface Add eth0 [10.128.2.74/23] from ovn-kubernetes multus logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:55 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:55 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:55 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-787f8fb495 SuccessfulCreate Created pod: all-in-one-ui-787f8fb495-ncr67 replicaset-controller logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:55 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-787f8fb495 to 1 deployment-controller logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:56 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:56 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:56 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:09:56 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:00 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:00 +0000 UTC Normal Pod all-in-one-ui-787f8fb495-ncr67.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:00 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-787f8fb495 SuccessfulDelete Deleted pod: all-in-one-ui-787f8fb495-ncr67 replicaset-controller logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:00 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-787f8fb495 to 0 from 1 deployment-controller logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:01 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j Binding Scheduled Successfully assigned kuttl-test-noble-mastiff/all-in-one-ui-5f894965f5-t2d6j to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:01 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j AddedInterface Add eth0 [10.128.2.75/23] from ovn-kubernetes multus logger.go:42: 14:10:55 | 
allinone | 2024-12-02 14:10:01 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:01 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-5f894965f5 SuccessfulCreate Created pod: all-in-one-ui-5f894965f5-t2d6j replicaset-controller logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:01 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-5f894965f5 to 1 deployment-controller logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:02 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:02 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:02 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:02 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:02 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:34 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:34 +0000 UTC Normal Pod all-in-one-ui-5f894965f5-t2d6j.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:41 +0000 UTC Normal Pod all-in-one-ui-644d78854b-tk79h Binding Scheduled Successfully assigned kuttl-test-noble-mastiff/all-in-one-ui-644d78854b-tk79h to ip-10-0-84-96.ec2.internal default-scheduler logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:41 +0000 UTC Warning Pod all-in-one-ui-644d78854b-tk79h FailedMount MountVolume.SetUp failed for volume "all-in-one-ui-collector-tls-config-volume" : secret "all-in-one-ui-collector-headless-tls" not found kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:41 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-644d78854b SuccessfulCreate Created pod: all-in-one-ui-644d78854b-tk79h replicaset-controller logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:41 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-644d78854b to 1 deployment-controller logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:42 +0000 UTC Normal Pod all-in-one-ui-644d78854b-tk79h AddedInterface Add eth0 [10.128.2.76/23] from ovn-kubernetes multus logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:42 +0000 UTC Normal Pod all-in-one-ui-644d78854b-tk79h.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1fa5ed13cc8a023f1e987f6cafe86adc195c373cc2b774539df6d0fd02b780a7" already present on machine kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:42 +0000 UTC Normal Pod 
all-in-one-ui-644d78854b-tk79h.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 14:10:55 | allinone | 2024-12-02 14:10:42 +0000 UTC Normal Pod all-in-one-ui-644d78854b-tk79h.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 14:10:55 | allinone | Deleting namespace: kuttl-test-noble-mastiff === CONT kuttl/harness/production logger.go:42: 14:11:02 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:11:02 | production | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 14:11:02 | production | Creating namespace: kuttl-test-immune-bunny logger.go:42: 14:11:02 | production/1-install | starting test step 1-install logger.go:42: 14:11:02 | production/1-install | Jaeger:kuttl-test-immune-bunny/production-ui created logger.go:42: 14:11:46 | production/1-install | test step completed 1-install logger.go:42: 14:11:46 | production/2-check-forbbiden-access | starting test step 2-check-forbbiden-access logger.go:42: 14:11:46 | production/2-check-forbbiden-access | running command: [./ensure-ingress-host.sh] logger.go:42: 14:11:46 | production/2-check-forbbiden-access | Checking the Ingress host value was populated logger.go:42: 14:11:46 | production/2-check-forbbiden-access | Try number 0 logger.go:42: 14:11:46 | production/2-check-forbbiden-access | Hostname is production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com logger.go:42: 14:11:46 | production/2-check-forbbiden-access | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE production-ui] logger.go:42: 14:11:47 | production/2-check-forbbiden-access | Checking an expected HTTP response logger.go:42: 14:11:47 | production/2-check-forbbiden-access | Running in OpenShift logger.go:42: 14:11:47 | production/2-check-forbbiden-access | Not using any secret logger.go:42: 14:11:47 | production/2-check-forbbiden-access | Try number 1/30 the https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search logger.go:42: 14:11:47 | production/2-check-forbbiden-access | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 14:11:47 | production/2-check-forbbiden-access | Try number 2/30 the https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search logger.go:42: 14:11:47 | production/2-check-forbbiden-access | HTTP response is 503. 403 expected. 
Waiting 10 s logger.go:42: 14:11:57 | production/2-check-forbbiden-access | Try number 3/30 the https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search logger.go:42: 14:11:57 | production/2-check-forbbiden-access | curl response asserted properly logger.go:42: 14:11:57 | production/2-check-forbbiden-access | test step completed 2-check-forbbiden-access logger.go:42: 14:11:57 | production/3-curl | starting test step 3-curl logger.go:42: 14:11:57 | production/3-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 14:11:57 | production/3-curl | Checking the Ingress host value was populated logger.go:42: 14:11:57 | production/3-curl | Try number 0 logger.go:42: 14:11:57 | production/3-curl | Hostname is production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com logger.go:42: 14:11:57 | production/3-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui] logger.go:42: 14:11:57 | production/3-curl | Checking an expected HTTP response logger.go:42: 14:11:57 | production/3-curl | Running in OpenShift logger.go:42: 14:11:57 | production/3-curl | User not provided. Getting the token... logger.go:42: 14:11:59 | production/3-curl | Warning: resource jaegers/production-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 14:12:05 | production/3-curl | Try number 1/30 the https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search logger.go:42: 14:12:05 | production/3-curl | Something failed while trying to contact the server. Trying insecure mode logger.go:42: 14:12:05 | production/3-curl | Try number 2/30 the https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search logger.go:42: 14:12:05 | production/3-curl | HTTP response is 503. 200 expected. 
logger.go:42: 14:12:15 | production/3-curl | Try number 3/30 the https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:12:15 | production/3-curl | curl response asserted properly
logger.go:42: 14:12:15 | production/3-curl | test step completed 3-curl
logger.go:42: 14:12:15 | production/4-install | starting test step 4-install
logger.go:42: 14:12:15 | production/4-install | Jaeger:kuttl-test-immune-bunny/production-ui updated
logger.go:42: 14:12:15 | production/4-install | test step completed 4-install
logger.go:42: 14:12:15 | production/5-check-disabled-security | starting test step 5-check-disabled-security
logger.go:42: 14:12:15 | production/5-check-disabled-security | running command: [./ensure-ingress-host.sh]
logger.go:42: 14:12:15 | production/5-check-disabled-security | Checking the Ingress host value was populated
logger.go:42: 14:12:15 | production/5-check-disabled-security | Try number 0
logger.go:42: 14:12:15 | production/5-check-disabled-security | Hostname is production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com
logger.go:42: 14:12:15 | production/5-check-disabled-security | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE production-ui]
logger.go:42: 14:12:16 | production/5-check-disabled-security | Checking an expected HTTP response
logger.go:42: 14:12:16 | production/5-check-disabled-security | Running in OpenShift
logger.go:42: 14:12:16 | production/5-check-disabled-security | Not using any secret
logger.go:42: 14:12:16 | production/5-check-disabled-security | Try number 1/30 the https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:12:16 | production/5-check-disabled-security | Something failed while trying to contact the server. Trying insecure mode
logger.go:42: 14:12:16 | production/5-check-disabled-security | Try number 2/30 the https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:12:16 | production/5-check-disabled-security | HTTP response is 403. 200 expected. Waiting 10 s
logger.go:42: 14:12:26 | production/5-check-disabled-security | Try number 3/30 the https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search
logger.go:42: 14:12:26 | production/5-check-disabled-security | curl response asserted properly
logger.go:42: 14:12:26 | production/5-check-disabled-security | test step completed 5-check-disabled-security
logger.go:42: 14:12:26 | production/6-check-NO-gaID | starting test step 6-check-NO-gaID
logger.go:42: 14:12:26 | production/6-check-NO-gaID | running command: [./ensure-ingress-host.sh]
logger.go:42: 14:12:26 | production/6-check-NO-gaID | Checking the Ingress host value was populated
logger.go:42: 14:12:26 | production/6-check-NO-gaID | Try number 0
logger.go:42: 14:12:26 | production/6-check-NO-gaID | Hostname is production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com
logger.go:42: 14:12:26 | production/6-check-NO-gaID | running command: [sh -c ASSERT_PRESENT=false EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 14:12:26 | production/6-check-NO-gaID | time="2024-12-02T14:12:26Z" level=info msg="Querying https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 14:12:26 | production/6-check-NO-gaID | time="2024-12-02T14:12:26Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 14:12:26 | production/6-check-NO-gaID | time="2024-12-02T14:12:26Z" level=info msg="Polling to https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search"
logger.go:42: 14:12:26 | production/6-check-NO-gaID | time="2024-12-02T14:12:26Z" level=info msg="Doing request number 0"
logger.go:42: 14:12:27 | production/6-check-NO-gaID | time="2024-12-02T14:12:27Z" level=info msg="Content not found and asserted it was not found!"
logger.go:42: 14:12:27 | production/6-check-NO-gaID | time="2024-12-02T14:12:27Z" level=info msg="Success!"
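The 6-check-NO-gaID step drives cmd-utils/uiconfig/main.go through the three environment variables visible in the command: QUERY_HOSTNAME is polled, EXPECTED_CONTENT is searched for in the response body, and the step succeeds when the content's presence matches ASSERT_PRESENT (false here, since the MyTrackingId snippet must not be served yet). A condensed sketch of that check; it is illustrative rather than the cmd-utils source, and it leaves out the TLS and Authorization-header handling the real helper logs:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"strings"
	"time"
)

func main() {
	url := os.Getenv("QUERY_HOSTNAME")
	expected := os.Getenv("EXPECTED_CONTENT")
	assertPresent := os.Getenv("ASSERT_PRESENT") == "true"

	fmt.Printf("Polling to %s\n", url)
	for try := 0; try < 30; try++ {
		fmt.Printf("Doing request number %d\n", try)
		resp, err := http.Get(url)
		if err == nil {
			body, _ := io.ReadAll(resp.Body)
			resp.Body.Close()
			// Succeed when the content's presence matches the assertion,
			// whether that assertion is "must appear" or "must not appear".
			if strings.Contains(string(body), expected) == assertPresent {
				fmt.Println("Success!")
				return
			}
			fmt.Println("The condition of the test function was not accomplished")
		}
		time.Sleep(10 * time.Second)
	}
	os.Exit(1)
}
```

Step 8 below reruns the same tool with ASSERT_PRESENT=true after the tracking ID has been added, which is why it retries until the reconfigured UI actually serves the new content.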
logger.go:42: 14:12:27 | production/6-check-NO-gaID | test step completed 6-check-NO-gaID
logger.go:42: 14:12:27 | production/7-add-tracking-id | starting test step 7-add-tracking-id
logger.go:42: 14:12:27 | production/7-add-tracking-id | running command: [sh -c kubectl apply -f add-tracking-id.yaml -n $NAMESPACE]
logger.go:42: 14:12:27 | production/7-add-tracking-id | jaeger.jaegertracing.io/production-ui configured
logger.go:42: 14:12:27 | production/7-add-tracking-id | test step completed 7-add-tracking-id
logger.go:42: 14:12:27 | production/8-check-gaID | starting test step 8-check-gaID
logger.go:42: 14:12:27 | production/8-check-gaID | running command: [./ensure-ingress-host.sh]
logger.go:42: 14:12:27 | production/8-check-gaID | Checking the Ingress host value was populated
logger.go:42: 14:12:27 | production/8-check-gaID | Try number 0
logger.go:42: 14:12:27 | production/8-check-gaID | Hostname is production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com
logger.go:42: 14:12:27 | production/8-check-gaID | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go]
logger.go:42: 14:12:28 | production/8-check-gaID | time="2024-12-02T14:12:28Z" level=info msg="Querying https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search..."
logger.go:42: 14:12:28 | production/8-check-gaID | time="2024-12-02T14:12:28Z" level=info msg="No secret provided for the Authorization header"
logger.go:42: 14:12:28 | production/8-check-gaID | time="2024-12-02T14:12:28Z" level=info msg="Polling to https://production-ui-kuttl-test-immune-bunny.apps.ci-op-smp68mmf-f1a62.cspilp.interop.ccitredhat.com/search"
logger.go:42: 14:12:28 | production/8-check-gaID | time="2024-12-02T14:12:28Z" level=info msg="Doing request number 0"
logger.go:42: 14:12:28 | production/8-check-gaID | time="2024-12-02T14:12:28Z" level=warning msg="Found: false . Assert: true"
logger.go:42: 14:12:28 | production/8-check-gaID | time="2024-12-02T14:12:28Z" level=warning msg="The condition of the test function was not accomplished"
logger.go:42: 14:12:36 | production/8-check-gaID | time="2024-12-02T14:12:36Z" level=info msg="Doing request number 1"
logger.go:42: 14:12:56 | production/8-check-gaID | time="2024-12-02T14:12:56Z" level=info msg="Content found and asserted!"
logger.go:42: 14:12:56 | production/8-check-gaID | time="2024-12-02T14:12:56Z" level=info msg="Success!"
logger.go:42: 14:12:56 | production/8-check-gaID | test step completed 8-check-gaID
logger.go:42: 14:12:56 | production | production events from ns kuttl-test-immune-bunny:
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:15 +0000 UTC Normal ReplicaSet.apps elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759b6b9f SuccessfulCreate Created pod: elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2 replicaset-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:15 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2 Binding Scheduled Successfully assigned kuttl-test-immune-bunny/elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2 to ip-10-0-84-96.ec2.internal default-scheduler
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:15 +0000 UTC Normal Deployment.apps elasticsearch-cdm-kuttltestimmunebunnyproductionui-1 ScalingReplicaSet Scaled up replica set elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759b6b9f to 1 deployment-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2 AddedInterface Add eth0 [10.128.2.77/23] from ovn-kubernetes multus
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2.spec.containers{elasticsearch} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch6-rhel9@sha256:8e4fbea4983cd58352349ca291383169b286bc166fad95a87807552ca43335e6" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2.spec.containers{elasticsearch} Created Created container elasticsearch kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2.spec.containers{elasticsearch} Started Started container elasticsearch kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2.spec.containers{proxy} Pulled Container image "registry.redhat.io/openshift-logging/elasticsearch-proxy-rhel9@sha256:d68824b0b2c84db8e33edf9ab344eb684c4a7ebd7ef162bbc309043adcb28e6b" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2.spec.containers{proxy} Created Created container proxy kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:16 +0000 UTC Normal Pod elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2.spec.containers{proxy} Started Started container proxy kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:31 +0000 UTC Warning Pod elasticsearch-cdm-kuttltestimmunebunnyproductionui-1-5d759chsk2.spec.containers{elasticsearch} Unhealthy Readiness probe failed: kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:43 +0000 UTC Normal Pod production-ui-collector-55cf5b74f4-9gnxt Binding Scheduled Successfully assigned kuttl-test-immune-bunny/production-ui-collector-55cf5b74f4-9gnxt to ip-10-0-123-161.ec2.internal default-scheduler
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:43 +0000 UTC Normal ReplicaSet.apps production-ui-collector-55cf5b74f4 SuccessfulCreate Created pod: production-ui-collector-55cf5b74f4-9gnxt replicaset-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:43 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-55cf5b74f4 to 1 deployment-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:43 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq Binding Scheduled Successfully assigned kuttl-test-immune-bunny/production-ui-query-597778b854-kskxq to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:43 +0000 UTC Warning Pod production-ui-query-597778b854-kskxq FailedMount MountVolume.SetUp failed for volume "production-ui-ui-oauth-proxy-tls" : secret "production-ui-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:43 +0000 UTC Normal ReplicaSet.apps production-ui-query-597778b854 SuccessfulCreate Created pod: production-ui-query-597778b854-kskxq replicaset-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:43 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-597778b854 to 1 deployment-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:44 +0000 UTC Normal Pod production-ui-collector-55cf5b74f4-9gnxt AddedInterface Add eth0 [10.131.0.81/23] from ovn-kubernetes multus
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:44 +0000 UTC Normal Pod production-ui-collector-55cf5b74f4-9gnxt.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:44686d560aa27cae8ff8693f88c4cb6e2edf1737010ec1f80709cb42250b729d" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:44 +0000 UTC Normal Pod production-ui-collector-55cf5b74f4-9gnxt.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:44 +0000 UTC Normal Pod production-ui-collector-55cf5b74f4-9gnxt.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:44 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq AddedInterface Add eth0 [10.129.2.66/23] from ovn-kubernetes multus
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:44 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:44 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:44 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:44 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:45 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:45 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:45 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:45 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:45 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 14:12:56 | production | 2024-12-02 14:11:58 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:00 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:00 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:00 +0000 UTC Normal Pod production-ui-query-597778b854-kskxq.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:00 +0000 UTC Normal ReplicaSet.apps production-ui-query-597778b854 SuccessfulDelete Deleted pod: production-ui-query-597778b854-kskxq replicaset-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:00 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-597778b854 to 0 from 1 deployment-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:01 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m Binding Scheduled Successfully assigned kuttl-test-immune-bunny/production-ui-query-64dfbbf56d-kx64m to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:01 +0000 UTC Normal ReplicaSet.apps production-ui-query-64dfbbf56d SuccessfulCreate Created pod: production-ui-query-64dfbbf56d-kx64m replicaset-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:01 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-64dfbbf56d to 1 deployment-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m AddedInterface Add eth0 [10.129.2.67/23] from ovn-kubernetes multus
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:02 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: missing request for memory in container jaeger-collector of Pod production-ui-collector-55cf5b74f4-9gnxt horizontal-pod-autoscaler
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:13 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: did not receive metrics for targeted pods (pods might be unready) horizontal-pod-autoscaler
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:17 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:17 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:17 +0000 UTC Normal Pod production-ui-query-64dfbbf56d-kx64m.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:17 +0000 UTC Normal ReplicaSet.apps production-ui-query-64dfbbf56d SuccessfulDelete Deleted pod: production-ui-query-64dfbbf56d-kx64m replicaset-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:17 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-64dfbbf56d to 0 from 1 deployment-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:18 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr Binding Scheduled Successfully assigned kuttl-test-immune-bunny/production-ui-query-649d97bfbc-z56hr to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:18 +0000 UTC Normal ReplicaSet.apps production-ui-query-649d97bfbc SuccessfulCreate Created pod: production-ui-query-649d97bfbc-z56hr replicaset-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:18 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-649d97bfbc to 1 deployment-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:19 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr AddedInterface Add eth0 [10.129.2.68/23] from ovn-kubernetes multus
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:19 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:19 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:19 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:19 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:19 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:19 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod production-ui-collector-55cf5b74f4-9gnxt horizontal-pod-autoscaler
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:28 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: missing request for cpu in container jaeger-collector of Pod production-ui-collector-55cf5b74f4-9gnxt horizontal-pod-autoscaler
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:29 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-query} Killing Stopping container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:29 +0000 UTC Normal Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-agent} Killing Stopping container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:29 +0000 UTC Warning Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-query} Unhealthy Readiness probe failed: Get "http://10.129.2.68:16687/": dial tcp 10.129.2.68:16687: connect: connection refused kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:29 +0000 UTC Warning Pod production-ui-query-649d97bfbc-z56hr.spec.containers{jaeger-agent} Unhealthy Readiness probe failed: Get "http://10.129.2.68:14271/": dial tcp 10.129.2.68:14271: connect: connection refused kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:29 +0000 UTC Normal ReplicaSet.apps production-ui-query-649d97bfbc SuccessfulDelete Deleted pod: production-ui-query-649d97bfbc-z56hr replicaset-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:29 +0000 UTC Normal Pod production-ui-query-6f76c85ccd-4p6hs Binding Scheduled Successfully assigned kuttl-test-immune-bunny/production-ui-query-6f76c85ccd-4p6hs to ip-10-0-19-23.ec2.internal default-scheduler
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:29 +0000 UTC Normal ReplicaSet.apps production-ui-query-6f76c85ccd SuccessfulCreate Created pod: production-ui-query-6f76c85ccd-4p6hs replicaset-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:29 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled down replica set production-ui-query-649d97bfbc to 0 from 1 deployment-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:29 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-6f76c85ccd to 1 deployment-controller
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:30 +0000 UTC Normal Pod production-ui-query-6f76c85ccd-4p6hs AddedInterface Add eth0 [10.129.2.69/23] from ovn-kubernetes multus
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:30 +0000 UTC Normal Pod production-ui-query-6f76c85ccd-4p6hs.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:24a83160aa930e1b72c2a2442a33b28af2b06c24b058e09afcd0d495a8066d6d" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:30 +0000 UTC Normal Pod production-ui-query-6f76c85ccd-4p6hs.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:30 +0000 UTC Normal Pod production-ui-query-6f76c85ccd-4p6hs.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:30 +0000 UTC Normal Pod production-ui-query-6f76c85ccd-4p6hs.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:986580d136bd26bf3d626578d2b630dd66cd83206fe5bba124f052a1a8d1d0ec" already present on machine kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:30 +0000 UTC Normal Pod production-ui-query-6f76c85ccd-4p6hs.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | 2024-12-02 14:12:30 +0000 UTC Normal Pod production-ui-query-6f76c85ccd-4p6hs.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 14:12:56 | production | Deleting namespace: kuttl-test-immune-bunny
=== CONT kuttl/harness/artifacts
logger.go:42: 14:13:03 | artifacts | Creating namespace: kuttl-test-delicate-louse
logger.go:42: 14:13:03 | artifacts | artifacts events from ns kuttl-test-delicate-louse:
logger.go:42: 14:13:03 | artifacts | Deleting namespace: kuttl-test-delicate-louse
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (199.00s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/allinone (71.19s)
        --- PASS: kuttl/harness/production (121.72s)
        --- PASS: kuttl/harness/artifacts (5.90s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml
time="2024-12-02T14:13:10Z" level=debug msg="Setting a new name for the test suites"
time="2024-12-02T14:13:10Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-12-02T14:13:10Z" level=debug msg="normalizing test case names"
time="2024-12-02T14:13:10Z" level=debug msg="ui/allinone -> ui_allinone"
time="2024-12-02T14:13:10Z" level=debug msg="ui/production -> ui_production"
time="2024-12-02T14:13:10Z" level=debug msg="ui/artifacts -> ui_artifacts"
+---------------+--------+
|     NAME      | RESULT |
+---------------+--------+
| ui_allinone   | passed |
| ui_production | passed |
| ui_artifacts  | passed |
+---------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
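As the junitcli debug lines above show, the tool renames the suite to ui and then normalizes each kuttl test-case name by replacing the / separator with _ (e.g. ui/allinone -> ui_allinone). A minimal sketch of just that renaming step; reading and rewriting the JUnit XML report itself is omitted:

```go
package main

import (
	"fmt"
	"strings"
)

// normalizeName reproduces the renaming shown in the junitcli debug output,
// e.g. "ui/allinone -> ui_allinone".
func normalizeName(name string) string {
	return strings.ReplaceAll(name, "/", "_")
}

func main() {
	for _, name := range []string{"ui/allinone", "ui/production", "ui/artifacts"} {
		fmt.Printf("%s -> %s\n", name, normalizeName(name))
	}
}
```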